gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package jshinter.analyzer;

import java.util.Arrays;
import java.util.List;

import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.TerminalNode;

import jshinter.antlr.ECMAScriptBaseListener;
import jshinter.antlr.ECMAScriptParser;
import jshinter.antlr.ECMAScriptParser.AssignmentExpressionContext;
import jshinter.antlr.ECMAScriptParser.AssignmentOperatorContext;
import jshinter.antlr.ECMAScriptParser.BitAndExpressionContext;
import jshinter.antlr.ECMAScriptParser.BitNotExpressionContext;
import jshinter.antlr.ECMAScriptParser.BitOrExpressionContext;
import jshinter.antlr.ECMAScriptParser.BitShiftExpressionContext;
import jshinter.antlr.ECMAScriptParser.BitXOrExpressionContext;
import jshinter.antlr.ECMAScriptParser.BlockContext;
import jshinter.antlr.ECMAScriptParser.BreakStatementContext;
import jshinter.antlr.ECMAScriptParser.ContinueStatementContext;
import jshinter.antlr.ECMAScriptParser.DebuggerStatementContext;
import jshinter.antlr.ECMAScriptParser.DoStatementContext;
import jshinter.antlr.ECMAScriptParser.EosContext;
import jshinter.antlr.ECMAScriptParser.EqualityExpressionContext;
import jshinter.antlr.ECMAScriptParser.ExpressionStatementContext;
import jshinter.antlr.ECMAScriptParser.ForInStatementContext;
import jshinter.antlr.ECMAScriptParser.ForStatementContext;
import jshinter.antlr.ECMAScriptParser.ForVarInStatementContext;
import jshinter.antlr.ECMAScriptParser.ForVarStatementContext;
import jshinter.antlr.ECMAScriptParser.FormalParameterListContext;
import jshinter.antlr.ECMAScriptParser.FunctionBodyContext;
import jshinter.antlr.ECMAScriptParser.FunctionDeclarationContext;
import jshinter.antlr.ECMAScriptParser.FunctionExpressionContext;
import jshinter.antlr.ECMAScriptParser.IdentifierExpressionContext;
import jshinter.antlr.ECMAScriptParser.IfStatementContext;
import jshinter.antlr.ECMAScriptParser.InitialiserContext;
import jshinter.antlr.ECMAScriptParser.IterationStatementContext;
import jshinter.antlr.ECMAScriptParser.MemberDotExpressionContext;
import jshinter.antlr.ECMAScriptParser.NewExpressionContext;
import jshinter.antlr.ECMAScriptParser.NotExpressionContext;
import jshinter.antlr.ECMAScriptParser.ProgramContext;
import jshinter.antlr.ECMAScriptParser.ReturnStatementContext;
import jshinter.antlr.ECMAScriptParser.SingleExpressionContext;
import jshinter.antlr.ECMAScriptParser.SourceElementsContext;
import jshinter.antlr.ECMAScriptParser.StatementContext;
import jshinter.antlr.ECMAScriptParser.ThrowStatementContext;
import jshinter.antlr.ECMAScriptParser.TryStatementContext;
import jshinter.antlr.ECMAScriptParser.TypeofExpressionContext;
import jshinter.antlr.ECMAScriptParser.VariableDeclarationContext;
import jshinter.antlr.ECMAScriptParser.VariableStatementContext;
import jshinter.antlr.ECMAScriptParser.WhileStatementContext;
import jshinter.antlr.ECMAScriptParser.WithStatementContext;
import jshinter.utility.ScopeManager;
import jshinter.utility.ScopeType;

/**
 * ANTLR parse-tree listener that implements a set of JSHint-style checks
 * over an ECMAScript parse tree: forbidden bitwise operators, {@code ==}/{@code !=}
 * instead of strict equality, invalid {@code typeof} comparisons, missing
 * semicolons, un-braced statement bodies, {@code eval}/{@code debugger} usage,
 * unfiltered for-in bodies, extension of native prototypes, and configurable
 * limits on nesting depth, parameter count, and statement count.
 *
 * <p>Findings are reported to stdout as {@code line,column: message.}</p>
 */
public class JSHinterListener extends ECMAScriptBaseListener {

	/** Native/built-in objects whose prototype must not be extended. */
	private static final List<String> NATIVE_OBJECTS = Arrays.asList(
			"Array", "ArrayBuffer", "Boolean", "Collator", "DataView", "Date",
			"DateTimeFormat", "Error", "EvalError", "Float32Array", "Float64Array",
			"Function", "Infinity", "Intl", "Int16Array", "Int32Array", "Int8Array",
			"Iterator", "Number", "NumberFormat", "Object", "RangeError",
			"ReferenceError", "RegExp", "StopIteration", "String", "SyntaxError",
			"TypeError", "Uint16Array", "Uint32Array", "Uint8Array",
			"Uint8ClampedArray", "URIError");

	/** Values considered valid on the right-hand side of a typeof comparison. */
	private static final List<String> ALLOWED_TYPEOF = Arrays.asList(
			"undefined", "boolean", "number", "string", "function", "object");

	private ECMAScriptParser parser;

	private ScopeManager scopeManager;

	// Optional limits; each is active only while its companion flag is true.
	private int MAX_DEPTH = 0;
	private boolean verifyMaxDepth = false;

	private int MAX_PARAMS = 0;
	private boolean verifyMaxParams = false;

	private int MAX_STATEMENTS = 0;
	private boolean verifyMaxStatements = false;

	// Current statement-nesting depth; starts at -1 so the outermost
	// block/statement level counts as depth 0.
	private int blockDepth = -1;

	public JSHinterListener(ECMAScriptParser parser) {
		this.parser = parser;
		scopeManager = new ScopeManager();
	}

	/**
	 * Enables the maximum-nesting-depth check.
	 *
	 * @param maxDepth maximum allowed depth; values &lt;= 0 disable the check
	 */
	public void verifyMaxDepth(int maxDepth) {
		verifyMaxDepth = maxDepth > 0;
		if (verifyMaxDepth) {
			MAX_DEPTH = maxDepth;
		}
	}

	/**
	 * Enables the maximum-parameter-count check.
	 *
	 * @param maxParams maximum allowed parameters; values &lt;= 0 disable the check
	 */
	public void verifyMaxParams(int maxParams) {
		verifyMaxParams = maxParams > 0;
		if (verifyMaxParams) {
			MAX_PARAMS = maxParams;
		}
	}

	/**
	 * Enables the maximum-statements-per-function check.
	 *
	 * @param maxStatements maximum allowed statements; values &lt;= 0 disable the check
	 */
	public void verifyMaxStatements(int maxStatements) {
		verifyMaxStatements = maxStatements > 0;
		if (verifyMaxStatements) {
			MAX_STATEMENTS = maxStatements;
		}
	}

	/** Prints a finding as {@code line,column: message.} (column is 1-based). */
	private void reportError(String msg, Token t) {
		System.out.printf("%d,%d: %s.\n", t.getLine(), t.getCharPositionInLine() + 1, msg);
	}

	/**
	 * Reports an "Unexpected use of '...'" finding for a bitwise-operator
	 * expression node. The operator is the first terminal child: index 0 for
	 * the prefix unary '~', index 1 for the infix binary operators — so
	 * scanning for the first terminal handles both shapes.
	 */
	private void reportUnexpectedBitOperator(ParserRuleContext ctx) {
		ParseTree operatorNode = null;
		for (int i = 0; i < ctx.getChildCount(); i++) {
			if (ctx.getChild(i) instanceof TerminalNode) {
				operatorNode = ctx.getChild(i);
				break;
			}
		}
		if (operatorNode == null) {
			return;
		}
		TokenStream ts = parser.getTokenStream();
		reportError(String.format("Unexpected use of '%s'", operatorNode.getText()),
				ts.get(operatorNode.getSourceInterval().a));
	}

	@Override
	public void enterBitXOrExpression(BitXOrExpressionContext ctx) {
		reportUnexpectedBitOperator(ctx);
	}

	@Override
	public void enterBitShiftExpression(BitShiftExpressionContext ctx) {
		reportUnexpectedBitOperator(ctx);
	}

	@Override
	public void enterBitNotExpression(BitNotExpressionContext ctx) {
		// '~' is a prefix operator; the shared helper picks child 0 here.
		// (The original code read child 1 — the operand — and so reported the
		// wrong text and position for '~expr'.)
		reportUnexpectedBitOperator(ctx);
	}

	@Override
	public void enterBitAndExpression(BitAndExpressionContext ctx) {
		reportUnexpectedBitOperator(ctx);
	}

	@Override
	public void enterBitOrExpression(BitOrExpressionContext ctx) {
		reportUnexpectedBitOperator(ctx);
	}

	@Override
	public void exitAssignmentExpression(AssignmentExpressionContext ctx) {
		SingleExpressionContext leftSideAssign = ctx.singleExpression(0);
		if (leftSideAssign instanceof MemberDotExpressionContext) {
			checkFreeze(leftSideAssign);
		}
	}

	/**
	 * Reports an assignment whose target is {@code <NativeObject>.prototype...},
	 * i.e. an attempt to extend the prototype of a built-in object.
	 */
	private void checkFreeze(SingleExpressionContext leftSideAssign) {
		SingleExpressionContext prototypeExpression = getPrototype(leftSideAssign);
		if (prototypeExpression == null) {
			return;
		}

		MemberDotExpressionContext exp = (MemberDotExpressionContext) prototypeExpression;
		SingleExpressionContext object = exp.singleExpression();
		if (NATIVE_OBJECTS.contains(object.getText())) {
			TokenStream ts = parser.getTokenStream();
			Token t = ts.get(leftSideAssign.getChild(0).getSourceInterval().b);
			reportError(String.format("Extending prototype of native object: '%s'", object.getText()), t);
		}
	}

	/**
	 * Walks a member-dot chain leftwards and returns the innermost
	 * {@code x.prototype} expression, or {@code null} if none is present.
	 */
	private SingleExpressionContext getPrototype(SingleExpressionContext ctx) {
		if (!(ctx instanceof MemberDotExpressionContext)) {
			return null;
		}

		MemberDotExpressionContext expression = (MemberDotExpressionContext) ctx;
		String tokenText = expression.identifierName().getText();
		if (tokenText.equals("prototype")) {
			return expression;
		}

		return getPrototype(expression.singleExpression());
	}

	@Override
	public void enterAssignmentOperator(AssignmentOperatorContext ctx) {
		Token t = ctx.getStart();
		int type = t.getType();
		// Token types 45..50 are the bitwise compound-assignment operators
		// in the generated lexer (e.g. <<=, >>=, &=, |=, ^=) —
		// NOTE(review): magic range copied from the generated grammar; confirm
		// against ECMAScriptLexer token constants.
		if (type >= 45 && type <= 50) {
			reportError(String.format("Unexpected use of '%s'", t.getText()), t);
		}
	}

	@Override
	public void enterEqualityExpression(EqualityExpressionContext ctx) {
		TokenStream ts = parser.getTokenStream();
		Token t = ts.get(ctx.getChild(1).getSourceInterval().b);
		String operator = t.getText();

		// Prefer strict equality operators.
		String shouldBe = null;
		if (operator.equals("==")) {
			shouldBe = "===";
		} else if (operator.equals("!=")) {
			shouldBe = "!==";
		}
		if (shouldBe != null) {
			reportError(String.format("Expected '%s' and instead saw '%s'", shouldBe, operator), t);
		}

		if (ctx.getChild(0) instanceof TypeofExpressionContext) {
			processTypeofExpression(ctx);
		}
	}

	/**
	 * For {@code typeof x == "..."} comparisons, checks that the string literal
	 * on the right-hand side is one of the values typeof can actually produce.
	 */
	private void processTypeofExpression(EqualityExpressionContext ctx) {
		TokenStream ts = parser.getTokenStream();
		Token t = ts.get(ctx.getChild(2).getSourceInterval().a);

		// Token type 99 is the StringLiteral token in the generated lexer —
		// NOTE(review): magic constant; confirm against ECMAScriptLexer.
		if (t.getType() == 99) {
			String literal = t.getText();
			// Strip the surrounding quote characters.
			literal = literal.substring(1, literal.length() - 1);
			if (!ALLOWED_TYPEOF.contains(literal)) {
				reportError(String.format("Invalid typeof value '%s'", literal), t);
			}
		}
	}

	@Override
	public void enterVariableDeclaration(VariableDeclarationContext ctx) {
		scopeManager.defineVariable(ctx.Identifier().getSymbol());
	}

	@Override
	public void exitProgram(ProgramContext ctx) {
		scopeManager.unstack();
	}

	@Override
	public void exitFunctionBody(FunctionBodyContext ctx) {
		scopeManager.unstack();

		if (ctx.sourceElements() != null) {
			int statementCount = ctx.sourceElements().sourceElement().size();
			if (verifyMaxStatements && statementCount > MAX_STATEMENTS) {
				reportError(String.format("This function has too many statements (%d)", statementCount),
						ctx.getParent().getStart());
			}
		}
	}

	@Override
	public void enterFunctionDeclaration(FunctionDeclarationContext ctx) {
		// The function name belongs to the enclosing scope; its body opens a new one.
		scopeManager.defineVariable(ctx.Identifier().getSymbol());
		scopeManager.stack(ScopeType.FUNCTION);
	}

	@Override
	public void enterFunctionExpression(FunctionExpressionContext ctx) {
		scopeManager.stack(ScopeType.FUNCTION);
	}

	@Override
	public void enterFormalParameterList(FormalParameterListContext ctx) {
		for (TerminalNode id : ctx.Identifier()) {
			scopeManager.defineVariable(id.getSymbol());
		}

		int numberOfParams = ctx.Identifier().size();
		if (verifyMaxParams && numberOfParams > MAX_PARAMS) {
			reportError(String.format("This function has too many parameters (%d)", numberOfParams),
					ctx.getStart());
		}
	}

	@Override
	public void enterIdentifierExpression(IdentifierExpressionContext ctx) {
		TerminalNode token = ctx.Identifier();
		scopeManager.registerUsage(token.getSymbol());
		if (token.getText().equals("eval")) {
			reportError("eval can be harmful", token.getSymbol());
		}
	}

	@Override
	public void enterStatement(StatementContext ctx) {
		// Bodies of if/iteration/with statements must be blocks, not bare statements.
		ParserRuleContext parentContext = ctx.getParent();
		if (parentContext instanceof IfStatementContext
				|| parentContext instanceof IterationStatementContext
				|| parentContext instanceof WithStatementContext) {
			ParseTree childContext = ctx.getChild(0);
			if (!(childContext instanceof BlockContext)) {
				TokenStream ts = parser.getTokenStream();
				Token t = ts.get(childContext.getSourceInterval().a);
				reportError(String.format("Expected '{' and instead saw '%s'", t.getText()), t);
			}
		}
	}

	/**
	 * Reports a missing semicolon when the statement's end-of-statement node
	 * exists but carries no explicit ';' (i.e. ASI would kick in).
	 */
	private void checkForSemicolon(ParserRuleContext ctx) {
		ParseTree lastChild = ctx.getChild(ctx.getChildCount() - 1);
		if (lastChild instanceof EosContext) {
			EosContext eos = (EosContext) lastChild;
			TerminalNode semicolon = eos.SemiColon();
			if (semicolon == null) {
				reportError("Missing semicolon", eos.getStop());
			}
		}
	}

	@Override
	public void exitExpressionStatement(ExpressionStatementContext ctx) {
		checkForSemicolon(ctx);
	}

	@Override
	public void exitVariableStatement(VariableStatementContext ctx) {
		checkForSemicolon(ctx);
	}

	@Override
	public void exitDoStatement(DoStatementContext ctx) {
		checkForSemicolon(ctx);
		blockDepth -= 1;
	}

	@Override
	public void exitContinueStatement(ContinueStatementContext ctx) {
		checkForSemicolon(ctx);
	}

	@Override
	public void exitBreakStatement(BreakStatementContext ctx) {
		checkForSemicolon(ctx);
	}

	@Override
	public void exitReturnStatement(ReturnStatementContext ctx) {
		checkForSemicolon(ctx);
	}

	@Override
	public void exitThrowStatement(ThrowStatementContext ctx) {
		checkForSemicolon(ctx);
	}

	@Override
	public void enterDebuggerStatement(DebuggerStatementContext ctx) {
		reportError("Forgotten 'debugger' statement?", ctx.getStart());
		checkForSemicolon(ctx);
	}

	@Override
	public void enterNewExpression(NewExpressionContext ctx) {
		// 'new Foo()' whose result is discarded (not part of an initialiser)
		// is being used for its side effects only.
		if (!(ctx.getParent() instanceof InitialiserContext)) {
			reportError("Do not use 'new' for side effects", ctx.getStart());
		}
	}

	@Override
	public void exitForInStatement(ForInStatementContext ctx) {
		if (!checkForInStatement(ctx.statement())) {
			reportForInError(ctx);
		}
		blockDepth -= 1;
	}

	@Override
	public void exitForVarInStatement(ForVarInStatementContext ctx) {
		if (!checkForInStatement(ctx.statement())) {
			reportForInError(ctx);
		}
		blockDepth -= 1;
	}

	/**
	 * Returns {@code true} when a for-in body is acceptably filtered: the body
	 * is (or starts with) an if statement, and when that condition is negated
	 * ({@code if (!expr)}) its first statement is a {@code continue}.
	 */
	private boolean checkForInStatement(StatementContext statement) {
		if (statement.block() == null) {
			if (statement.ifStatement() == null) {
				return false;
			}
		} else {
			IfStatementContext ifStmt = statement.block().statementList().statement(0).ifStatement();

			// For-in must be wrapped with an if statement
			if (ifStmt == null) {
				return false;
			} else {
				// When on if (!expr), the first statement must be a 'continue'
				if (ifStmt.expressionSequence().singleExpression(0) instanceof NotExpressionContext) {
					List<StatementContext> statements;
					BlockContext ifBlock = ifStmt.statement(0).block();
					if (ifBlock != null) {
						statements = ifBlock.statementList().statement();
					} else {
						statements = ifStmt.statement();
					}

					if (statements.get(0).continueStatement() == null) {
						return false;
					}
				}
			}
		}

		return true;
	}

	private void reportForInError(ParserRuleContext ctx) {
		reportError("The body of a for in should be wrapped in an if statement to filter unwanted properties from the prototype.",
				ctx.getStart());
	}

	/** Reports once, at the moment depth first exceeds the configured maximum. */
	private void checkBlockDepth(ParserRuleContext ctx) {
		if (verifyMaxDepth && blockDepth == MAX_DEPTH + 1) {
			reportError(String.format("Blocks are nested too deeply (%d)", MAX_DEPTH + 1), ctx.getStart());
		}
	}

	@Override
	public void enterBlock(BlockContext ctx) {
		// A block that is itself a statement body was already counted by the
		// enclosing statement handler; don't count it twice.
		if (ctx.getParent() instanceof StatementContext) return;
		blockDepth += 1;
		checkBlockDepth(ctx);
	}

	@Override
	public void exitBlock(BlockContext ctx) {
		if (ctx.getParent() instanceof StatementContext) return;
		blockDepth -= 1;
	}

	@Override
	public void enterIfStatement(IfStatementContext ctx) {
		blockDepth += 1;
		checkBlockDepth(ctx);
	}

	@Override
	public void enterDoStatement(DoStatementContext ctx) {
		blockDepth += 1;
		checkBlockDepth(ctx);
	}

	@Override
	public void enterWhileStatement(WhileStatementContext ctx) {
		blockDepth += 1;
		checkBlockDepth(ctx);
	}

	@Override
	public void enterForStatement(ForStatementContext ctx) {
		blockDepth += 1;
		checkBlockDepth(ctx);
	}

	@Override
	public void enterForVarStatement(ForVarStatementContext ctx) {
		blockDepth += 1;
		checkBlockDepth(ctx);
	}

	@Override
	public void enterForInStatement(ForInStatementContext ctx) {
		blockDepth += 1;
		checkBlockDepth(ctx);
	}

	@Override
	public void enterForVarInStatement(ForVarInStatementContext ctx) {
		blockDepth += 1;
		checkBlockDepth(ctx);
	}

	@Override
	public void enterTryStatement(TryStatementContext ctx) {
		blockDepth += 1;
		checkBlockDepth(ctx);
	}

	@Override
	public void exitIfStatement(IfStatementContext ctx) {
		blockDepth -= 1;
	}

	@Override
	public void exitWhileStatement(WhileStatementContext ctx) {
		blockDepth -= 1;
	}

	@Override
	public void exitForStatement(ForStatementContext ctx) {
		blockDepth -= 1;
	}

	@Override
	public void exitForVarStatement(ForVarStatementContext ctx) {
		blockDepth -= 1;
	}

	@Override
	public void exitTryStatement(TryStatementContext ctx) {
		blockDepth -= 1;
	}
}
/* * Copyright (C) 2012-2014 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ninja; import com.google.common.base.Optional; import ninja.exceptions.BadRequestException; import ninja.exceptions.InternalServerErrorException; import ninja.i18n.Messages; import ninja.lifecycle.LifecycleService; import ninja.utils.Message; import ninja.utils.NinjaConstant; import ninja.utils.ResultHandler; import static org.hamcrest.CoreMatchers.equalTo; import org.junit.Test; import static org.junit.Assert.*; import org.junit.Before; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; import org.mockito.Captor; import org.mockito.Matchers; import static org.mockito.Matchers.any; import org.mockito.Mock; import org.mockito.Mockito; import static org.mockito.Mockito.verify; import org.mockito.runners.MockitoJUnitRunner; @RunWith(MockitoJUnitRunner.class) public class NinjaDefaultTest { @Mock LifecycleService lifecylceService; @Mock ResultHandler resultHandler; @Mock Router router; @Mock Context.Impl contextImpl; @Mock Messages messages; @Mock Result result; Route route; @Captor ArgumentCaptor<Result> resultCaptor; NinjaDefault ninjaDefault; @Before public void before() { ninjaDefault = Mockito.spy(new NinjaDefault()); ninjaDefault.lifecycleService = lifecylceService; ninjaDefault.resultHandler = resultHandler; ninjaDefault.router = router; ninjaDefault.messages = messages; // Just a dummy to make logging work without // Null pointer exceptions. 
route = Mockito.mock(Route.class); Mockito.when(contextImpl.getRequestPath()).thenReturn("/path"); Mockito.when(contextImpl.getRoute()).thenReturn(route); Mockito.when(contextImpl.getMethod()).thenReturn("httpMethod"); Mockito.when(contextImpl.getRequestPath()).thenReturn("requestPath"); Mockito.when(router.getRouteFor(Matchers.eq("httpMethod"), Matchers.eq("requestPath"))).thenReturn(route); // just a default answer so we don't get a nullpointer badRequestException. // can be verified later... Mockito.when( messages.getWithDefault( Matchers.anyString(), Matchers.anyString(), any(Optional.class))) .thenReturn("NOT_IMPORTANT_MESSAGE"); } @Test public void testOnRouteRequestWhenEverythingWorks() throws Exception { FilterChain filterChain = Mockito.mock(FilterChain.class); Mockito.when(route.getFilterChain()).thenReturn(filterChain); Result result = Mockito.mock(Result.class); Mockito.when(filterChain.next(contextImpl)).thenReturn(result); ninjaDefault.onRouteRequest(contextImpl); verify(contextImpl).setRoute(route); verify(resultHandler).handleResult(result, contextImpl); verify(ninjaDefault, Mockito.never()).getInternalServerErrorResult(any(Context.class), any(Exception.class)); verify(ninjaDefault, Mockito.never()).getBadRequestResult(any(Context.class), any(Exception.class)); verify(ninjaDefault, Mockito.never()).getNotFoundResult(any(Context.class)); } @Test public void testOnRouteRequestWhenException() throws Exception { Mockito.when( messages.getWithDefault( Matchers.eq(NinjaConstant.I18N_NINJA_SYSTEM_INTERNAL_SERVER_ERROR_TEXT_KEY), Matchers.eq(NinjaConstant.I18N_NINJA_SYSTEM_INTERNAL_SERVER_ERROR_TEXT_DEFAULT), any(Optional.class))) .thenReturn(NinjaConstant.I18N_NINJA_SYSTEM_INTERNAL_SERVER_ERROR_TEXT_DEFAULT); FilterChain filterChain = Mockito.mock(FilterChain.class); Mockito.when(route.getFilterChain()).thenReturn(filterChain); Exception exception = new RuntimeException("That's a very generic exception that should be handled by onError!"); 
Mockito.when(filterChain.next(contextImpl)).thenThrow(exception); ninjaDefault.onRouteRequest(contextImpl); verify(ninjaDefault).getInternalServerErrorResult(contextImpl, exception); } @Test public void testOnRouteRequestWhenInternalServerErrorException() throws Exception { FilterChain filterChain = Mockito.mock(FilterChain.class); Mockito.when(route.getFilterChain()).thenReturn(filterChain); InternalServerErrorException internalServerErrorException = new InternalServerErrorException("That's an InternalServerErrorException that should be handled by onError!"); Mockito.when(filterChain.next(contextImpl)).thenThrow(internalServerErrorException); ninjaDefault.onRouteRequest(contextImpl); verify(ninjaDefault).getInternalServerErrorResult(contextImpl, internalServerErrorException); } @Test public void testOnRouteRequestWhenOnBadRequest() throws Exception { FilterChain filterChain = Mockito.mock(FilterChain.class); Mockito.when(route.getFilterChain()).thenReturn(filterChain); BadRequestException badRequest = new BadRequestException("That's a BadRequest that should be handled by onBadRequest"); Mockito.when(filterChain.next(contextImpl)).thenThrow(badRequest); ninjaDefault.onRouteRequest(contextImpl); verify(ninjaDefault).getBadRequestResult(contextImpl, badRequest); } @Test public void testOnRouteRequestWhenOnNotFound() throws Exception { FilterChain filterChain = Mockito.mock(FilterChain.class); Mockito.when(route.getFilterChain()).thenReturn(filterChain); // This simulates that a route has not been found // subsequently the onNotFound method should be called. 
Mockito.when( router.getRouteFor( Matchers.anyString(), Matchers.anyString())) .thenReturn(null); ninjaDefault.onRouteRequest(contextImpl); verify(ninjaDefault).getNotFoundResult(contextImpl); } @Test public void testOnExceptionBadRequest() { Exception badRequestException = new BadRequestException(); Result result = ninjaDefault.onException(contextImpl, badRequestException); verify(ninjaDefault).getBadRequestResult(contextImpl, badRequestException); assertThat(result.getStatusCode(), equalTo(Result.SC_400_BAD_REQUEST)); } @Test public void testOnExceptionCatchAll() { Exception anyException = new Exception(); Result result = ninjaDefault.onException(contextImpl, anyException); verify(ninjaDefault).getInternalServerErrorResult(contextImpl, anyException); assertThat(result.getStatusCode(), equalTo(Result.SC_500_INTERNAL_SERVER_ERROR)); } @Test public void testThatGetInternalServerErrorContentNegotiation() throws Exception { Mockito.when(contextImpl.getAcceptContentType()).thenReturn(Result.APPLICATON_JSON); Result result = ninjaDefault.getInternalServerErrorResult(contextImpl, new Exception("not important")); assertThat(result.getContentType(), equalTo(null)); assertThat(result.supportedContentTypes().size(), equalTo(3)); } @Test public void testThatGetInternalServerErrorDoesFallsBackToHtml() throws Exception { Mockito.when(contextImpl.getAcceptContentType()).thenReturn("not_supported"); Result result = ninjaDefault.getInternalServerErrorResult(contextImpl, new Exception("not important")); assertThat(result.fallbackContentType().get(), equalTo(Result.TEXT_HTML)); } @Test public void getInternalServerErrorResult() throws Exception { // real test: Result result = ninjaDefault.getInternalServerErrorResult( contextImpl, new Exception("not important")); assertThat(result.getStatusCode(), equalTo(Result.SC_500_INTERNAL_SERVER_ERROR)); assertThat(result.getTemplate(), equalTo(NinjaConstant.LOCATION_VIEW_FTL_HTML_INTERNAL_SERVER_ERROR)); assertTrue(result.getRenderable() 
instanceof Message); verify(messages).getWithDefault( Matchers.eq(NinjaConstant.I18N_NINJA_SYSTEM_INTERNAL_SERVER_ERROR_TEXT_KEY), Matchers.eq(NinjaConstant.I18N_NINJA_SYSTEM_INTERNAL_SERVER_ERROR_TEXT_DEFAULT), Matchers.eq(contextImpl), any(Optional.class)); } @Test public void testThatGetBadRequestContentNegotiation() throws Exception { Mockito.when(contextImpl.getAcceptContentType()).thenReturn(Result.APPLICATON_JSON); Result result = ninjaDefault.getBadRequestResult(contextImpl, new Exception("not important")); assertThat(result.getContentType(), equalTo(null)); assertThat(result.supportedContentTypes().size(), equalTo(3)); } @Test public void testThatGetBadRequestDoesFallsBackToHtml() throws Exception { Mockito.when(contextImpl.getAcceptContentType()).thenReturn("not_supported"); Result result = ninjaDefault.getBadRequestResult(contextImpl, new Exception("not important")); assertThat(result.fallbackContentType().get(), equalTo(Result.TEXT_HTML)); } @Test public void testGetBadRequest() throws Exception { // real test: Result result = ninjaDefault.getBadRequestResult( contextImpl, new BadRequestException("not important")); assertThat(result.getStatusCode(), equalTo(Result.SC_400_BAD_REQUEST)); assertThat(result.getTemplate(), equalTo(NinjaConstant.LOCATION_VIEW_FTL_HTML_BAD_REQUEST)); assertTrue(result.getRenderable() instanceof Message); verify(messages).getWithDefault( Matchers.eq(NinjaConstant.I18N_NINJA_SYSTEM_BAD_REQUEST_TEXT_KEY), Matchers.eq(NinjaConstant.I18N_NINJA_SYSTEM_BAD_REQUEST_TEXT_DEFAULT), Matchers.eq(contextImpl), any(Optional.class)); } @Test public void testThatGetOnNotFoundDoesContentNegotiation() throws Exception { Mockito.when(contextImpl.getAcceptContentType()).thenReturn(Result.APPLICATON_JSON); Result result = ninjaDefault.getNotFoundResult(contextImpl); assertThat(result.getContentType(), equalTo(null)); assertThat(result.supportedContentTypes().size(), equalTo(3)); } @Test public void testThatGetOnNotFoundDoesFallsBackToHtml() throws 
Exception { Mockito.when(contextImpl.getAcceptContentType()).thenReturn("not_supported"); Result result = ninjaDefault.getNotFoundResult(contextImpl); assertThat(result.fallbackContentType().get(), equalTo(Result.TEXT_HTML)); } @Test public void testGetOnNotFoundResultWorks() throws Exception { Result result = ninjaDefault.getNotFoundResult(contextImpl); assertThat(result.getStatusCode(), equalTo(Result.SC_404_NOT_FOUND)); assertThat(result.getTemplate(), equalTo(NinjaConstant.LOCATION_VIEW_FTL_HTML_NOT_FOUND)); assertTrue(result.getRenderable() instanceof Message); verify(messages).getWithDefault( Matchers.eq(NinjaConstant.I18N_NINJA_SYSTEM_NOT_FOUND_TEXT_KEY), Matchers.eq(NinjaConstant.I18N_NINJA_SYSTEM_NOT_FOUND_TEXT_DEFAULT), Matchers.eq(contextImpl), any(Optional.class)); } @Test public void testOnFrameworkStart() { ninjaDefault.onFrameworkStart(); verify(lifecylceService).start(); } @Test public void testOnFrameworkShutdown() { ninjaDefault.onFrameworkShutdown(); verify(lifecylceService).stop(); } @Test public void testRenderErrorResultAndCatchAndLogExceptionsAsync() { Mockito.when(contextImpl.isAsync()).thenReturn(true); ninjaDefault.renderErrorResultAndCatchAndLogExceptions(result, contextImpl); verify(contextImpl).isAsync(); verify(contextImpl).returnResultAsync(result); } @Test public void testRenderErrorResultAndCatchAndLogExceptionsSync() { Mockito.when(contextImpl.isAsync()).thenReturn(false); ninjaDefault.renderErrorResultAndCatchAndLogExceptions(result, contextImpl); verify(resultHandler).handleResult(result, contextImpl); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.gobblin.service.modules.core; import java.io.File; import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.SystemUtils; import org.eclipse.jgit.api.Git; import org.eclipse.jgit.api.errors.GitAPIException; import org.eclipse.jgit.dircache.DirCache; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.lib.RepositoryCache; import org.eclipse.jgit.revwalk.RevCommit; import org.eclipse.jgit.transport.RefSpec; import org.eclipse.jgit.util.FS; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testng.Assert; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; import com.google.common.base.Charsets; import com.google.common.base.Joiner; import com.google.common.base.Optional; import com.google.common.io.Files; import com.typesafe.config.Config; import com.typesafe.config.ConfigFactory; import org.apache.gobblin.config.ConfigBuilder; 
import org.apache.gobblin.configuration.ConfigurationKeys; import org.apache.gobblin.runtime.api.TopologySpec; import org.apache.gobblin.service.ServiceConfigKeys; import org.apache.gobblin.service.modules.flow.MultiHopFlowCompilerTest; import org.apache.gobblin.service.modules.flowgraph.BaseFlowGraph; import org.apache.gobblin.service.modules.flowgraph.DataNode; import org.apache.gobblin.service.modules.flowgraph.FlowEdge; import org.apache.gobblin.service.modules.flowgraph.FlowGraphConfigurationKeys; import org.apache.gobblin.service.modules.template_catalog.FSFlowCatalog; public class GitFlowGraphMonitorTest { private static final Logger logger = LoggerFactory.getLogger(GitFlowGraphMonitor.class); private Repository remoteRepo; private Git gitForPush; private static final String TEST_DIR = "/tmp/gitFlowGraphTestDir"; private final File remoteDir = new File(TEST_DIR + "/remote"); private final File cloneDir = new File(TEST_DIR + "/clone"); private final File flowGraphDir = new File(cloneDir, "/gobblin-flowgraph"); private static final String NODE_1_FILE = "node1.properties"; private final File node1Dir = new File(flowGraphDir, "node1"); private final File node1File = new File(node1Dir, NODE_1_FILE); private static final String NODE_2_FILE = "node2.properties"; private final File node2Dir = new File(flowGraphDir, "node2"); private final File node2File = new File(node2Dir, NODE_2_FILE); private final File edge1Dir = new File(node1Dir, "node2"); private final File edge1File = new File(edge1Dir, "edge1.properties"); private RefSpec masterRefSpec = new RefSpec("master"); private Optional<FSFlowCatalog> flowCatalog; private Config config; private BaseFlowGraph flowGraph; private GitFlowGraphMonitor gitFlowGraphMonitor; @BeforeClass public void setUp() throws Exception { cleanUpDir(TEST_DIR); // Create a bare repository RepositoryCache.FileKey fileKey = RepositoryCache.FileKey.exact(remoteDir, FS.DETECTED); this.remoteRepo = fileKey.open(false); 
this.remoteRepo.create(true); this.gitForPush = Git.cloneRepository().setURI(this.remoteRepo.getDirectory().getAbsolutePath()).setDirectory(cloneDir).call(); // push an empty commit as a base for detecting changes this.gitForPush.commit().setMessage("First commit").call(); this.gitForPush.push().setRemote("origin").setRefSpecs(this.masterRefSpec).call(); URI topologyCatalogUri = this.getClass().getClassLoader().getResource("topologyspec_catalog").toURI(); Map<URI, TopologySpec> topologySpecMap = MultiHopFlowCompilerTest.buildTopologySpecMap(topologyCatalogUri); this.config = ConfigBuilder.create() .addPrimitive(GitFlowGraphMonitor.GIT_FLOWGRAPH_MONITOR_PREFIX + "." + ConfigurationKeys.GIT_MONITOR_REPO_URI, this.remoteRepo.getDirectory().getAbsolutePath()) .addPrimitive(GitFlowGraphMonitor.GIT_FLOWGRAPH_MONITOR_PREFIX + "." + ConfigurationKeys.GIT_MONITOR_REPO_DIR, TEST_DIR + "/git-flowgraph") .addPrimitive(GitFlowGraphMonitor.GIT_FLOWGRAPH_MONITOR_PREFIX + "." + ConfigurationKeys.GIT_MONITOR_POLLING_INTERVAL, 5) .build(); // Create a FSFlowCatalog instance URI flowTemplateCatalogUri = this.getClass().getClassLoader().getResource("template_catalog").toURI(); Properties properties = new Properties(); properties.put(ServiceConfigKeys.TEMPLATE_CATALOGS_FULLY_QUALIFIED_PATH_KEY, flowTemplateCatalogUri.toString()); Config config = ConfigFactory.parseProperties(properties); Config templateCatalogCfg = config .withValue(ConfigurationKeys.JOB_CONFIG_FILE_GENERAL_PATH_KEY, config.getValue(ServiceConfigKeys.TEMPLATE_CATALOGS_FULLY_QUALIFIED_PATH_KEY)); this.flowCatalog = Optional.of(new FSFlowCatalog(templateCatalogCfg)); //Create a FlowGraph instance with defaults this.flowGraph = new BaseFlowGraph(); this.gitFlowGraphMonitor = new GitFlowGraphMonitor(this.config, this.flowCatalog, this.flowGraph, topologySpecMap, new CountDownLatch(1)); this.gitFlowGraphMonitor.setActive(true); } @Test public void testAddNode() throws IOException, GitAPIException { String file1Contents = 
FlowGraphConfigurationKeys.DATA_NODE_IS_ACTIVE_KEY + "=true\nparam1=value1\n"; String file2Contents = FlowGraphConfigurationKeys.DATA_NODE_IS_ACTIVE_KEY + "=true\nparam2=value2\n"; addNode(this.node1Dir, this.node1File, file1Contents); addNode(this.node2Dir, this.node2File, file2Contents); this.gitFlowGraphMonitor.processGitConfigChanges(); for (int i = 0; i < 1; i++) { String nodeId = "node" + (i + 1); String paramKey = "param" + (i + 1); String paramValue = "value" + (i + 1); //Check if nodes have been added to the FlowGraph DataNode dataNode = this.flowGraph.getNode(nodeId); Assert.assertEquals(dataNode.getId(), nodeId); Assert.assertTrue(dataNode.isActive()); Assert.assertEquals(dataNode.getRawConfig().getString(paramKey), paramValue); } } @Test (dependsOnMethods = "testAddNode") public void testAddEdge() throws IOException, GitAPIException, ExecutionException, InterruptedException { //Build contents of edge file String fileContents = buildEdgeFileContents("node1", "node2", "edge1", "value1"); addEdge(this.edge1Dir, this.edge1File, fileContents); this.gitFlowGraphMonitor.processGitConfigChanges(); //Check if edge1 has been added to the FlowGraph testIfEdgeSuccessfullyAdded("node1", "node2", "edge1", "value1"); } @Test (dependsOnMethods = "testAddNode") public void testUpdateEdge() throws IOException, GitAPIException, URISyntaxException, ExecutionException, InterruptedException { //Update edge1 file String fileContents = buildEdgeFileContents("node1", "node2", "edge1", "value2"); addEdge(this.edge1Dir, this.edge1File, fileContents); // add, commit, push this.gitForPush.add().addFilepattern(formEdgeFilePath(this.edge1Dir.getParentFile().getName(), this.edge1Dir.getName(), this.edge1File.getName())).call(); this.gitForPush.commit().setMessage("Edge commit").call(); this.gitForPush.push().setRemote("origin").setRefSpecs(this.masterRefSpec).call(); this.gitFlowGraphMonitor.processGitConfigChanges(); //Check if new edge1 has been added to the FlowGraph 
testIfEdgeSuccessfullyAdded("node1", "node2", "edge1", "value2"); } @Test (dependsOnMethods = "testUpdateEdge") public void testUpdateNode() throws IOException, GitAPIException, URISyntaxException, ExecutionException, InterruptedException { //Update param1 value in node1 and check if updated node is added to the graph String fileContents = FlowGraphConfigurationKeys.DATA_NODE_IS_ACTIVE_KEY + "=true\nparam1=value3\n"; addNode(this.node1Dir, this.node1File, fileContents); this.gitFlowGraphMonitor.processGitConfigChanges(); //Check if node has been updated in the FlowGraph DataNode dataNode = this.flowGraph.getNode("node1"); Assert.assertEquals(dataNode.getId(), "node1"); Assert.assertTrue(dataNode.isActive()); Assert.assertEquals(dataNode.getRawConfig().getString("param1"), "value3"); } @Test (dependsOnMethods = "testUpdateNode") public void testRemoveEdge() throws GitAPIException, IOException { // delete a config file edge1File.delete(); //Node1 has 1 edge before delete Set<FlowEdge> edgeSet = this.flowGraph.getEdges("node1"); Assert.assertEquals(edgeSet.size(), 1); // delete, commit, push DirCache ac = this.gitForPush.rm().addFilepattern(formEdgeFilePath(this.edge1Dir.getParentFile().getName(), this.edge1Dir.getName(), this.edge1File.getName())).call(); RevCommit cc = this.gitForPush.commit().setMessage("Edge remove commit").call(); this.gitForPush.push().setRemote("origin").setRefSpecs(this.masterRefSpec).call(); this.gitFlowGraphMonitor.processGitConfigChanges(); //Check if edge1 has been deleted from the graph edgeSet = this.flowGraph.getEdges("node1"); Assert.assertTrue(edgeSet.size() == 0); } @Test (dependsOnMethods = "testRemoveEdge") public void testRemoveNode() throws GitAPIException, IOException { //delete node files node1File.delete(); node2File.delete(); //Ensure node1 and node2 are present in the graph before delete DataNode node1 = this.flowGraph.getNode("node1"); Assert.assertNotNull(node1); DataNode node2 = this.flowGraph.getNode("node2"); 
Assert.assertNotNull(node2); // delete, commit, push this.gitForPush.rm().addFilepattern(formNodeFilePath(this.node1Dir.getName(), this.node1File.getName())).call(); this.gitForPush.rm().addFilepattern(formNodeFilePath(this.node2Dir.getName(), this.node2File.getName())).call(); this.gitForPush.commit().setMessage("Node remove commit").call(); this.gitForPush.push().setRemote("origin").setRefSpecs(this.masterRefSpec).call(); this.gitFlowGraphMonitor.processGitConfigChanges(); //Check if node1 and node 2 have been deleted from the graph node1 = this.flowGraph.getNode("node1"); Assert.assertNull(node1); node2 = this.flowGraph.getNode("node2"); Assert.assertNull(node2); } @Test (dependsOnMethods = "testRemoveNode") public void testChangesReorder() throws GitAPIException, IOException, ExecutionException, InterruptedException { String node1FileContents = FlowGraphConfigurationKeys.DATA_NODE_IS_ACTIVE_KEY + "=true\nparam1=value1\n"; String node2FileContents = FlowGraphConfigurationKeys.DATA_NODE_IS_ACTIVE_KEY + "=true\nparam2=value2\n"; String edgeFileContents = buildEdgeFileContents("node1", "node2", "edge1", "value1"); createNewFile(this.node1Dir, this.node1File, node1FileContents); createNewFile(this.node2Dir, this.node2File, node2FileContents); createNewFile(this.edge1Dir, this.edge1File, edgeFileContents); // add, commit, push this.gitForPush.add().addFilepattern(formNodeFilePath(this.node1Dir.getName(), this.node1File.getName())).call(); this.gitForPush.add().addFilepattern(formNodeFilePath(this.node2Dir.getName(), this.node2File.getName())).call(); this.gitForPush.commit().setMessage("Add nodes commit").call(); this.gitForPush.push().setRemote("origin").setRefSpecs(this.masterRefSpec).call(); this.gitForPush.add().addFilepattern(formEdgeFilePath(this.edge1Dir.getParentFile().getName(), this.edge1Dir.getName(), this.edge1File.getName())).call(); this.gitForPush.commit().setMessage("Add nodes and edges commit").call(); 
this.gitForPush.push().setRemote("origin").setRefSpecs(this.masterRefSpec).call(); this.gitFlowGraphMonitor.processGitConfigChanges(); //Ensure node1 and node2 are present in the graph DataNode node1 = this.flowGraph.getNode("node1"); Assert.assertNotNull(node1); DataNode node2 = this.flowGraph.getNode("node2"); Assert.assertNotNull(node2); testIfEdgeSuccessfullyAdded("node1", "node2", "edge1", "value1"); //Delete node1, edge node1->node2 files node1File.delete(); edge1File.delete(); //Commit1: delete node1 and edge node1->node2 this.gitForPush.rm().addFilepattern(formNodeFilePath(this.node1Dir.getName(), this.node1File.getName())).call(); this.gitForPush.rm().addFilepattern(formEdgeFilePath(this.edge1Dir.getParentFile().getName(), this.edge1Dir.getName(), this.edge1File.getName())).call(); this.gitForPush.commit().setMessage("Delete node1 and edge1 commit").call(); this.gitForPush.push().setRemote("origin").setRefSpecs(this.masterRefSpec).call(); //Commit2: add node1 back createNewFile(this.node1Dir, this.node1File, node1FileContents); this.gitForPush.add().addFilepattern(formNodeFilePath(this.node1Dir.getName(), this.node1File.getName())).call(); this.gitForPush.commit().setMessage("Add node1 commit").call(); this.gitForPush.push().setRemote("origin").setRefSpecs(this.masterRefSpec).call(); this.gitFlowGraphMonitor.processGitConfigChanges(); node1 = this.flowGraph.getNode("node1"); Assert.assertNotNull(node1); Assert.assertEquals(this.flowGraph.getEdges(node1).size(), 0); } @AfterClass public void tearDown() throws Exception { cleanUpDir(TEST_DIR); } private void createNewFile(File dir, File file, String fileContents) throws IOException { dir.mkdirs(); file.createNewFile(); Files.write(fileContents, file, Charsets.UTF_8); } private void addNode(File nodeDir, File nodeFile, String fileContents) throws IOException, GitAPIException { createNewFile(nodeDir, nodeFile, fileContents); // add, commit, push node 
this.gitForPush.add().addFilepattern(formNodeFilePath(nodeDir.getName(), nodeFile.getName())).call(); this.gitForPush.commit().setMessage("Node commit").call(); this.gitForPush.push().setRemote("origin").setRefSpecs(this.masterRefSpec).call(); } private void addEdge(File edgeDir, File edgeFile, String fileContents) throws IOException, GitAPIException { createNewFile(edgeDir, edgeFile, fileContents); // add, commit, push edge this.gitForPush.add().addFilepattern(formEdgeFilePath(edgeDir.getParentFile().getName(), edgeDir.getName(), edgeFile.getName())).call(); this.gitForPush.commit().setMessage("Edge commit").call(); this.gitForPush.push().setRemote("origin").setRefSpecs(this.masterRefSpec).call(); } private String buildEdgeFileContents(String node1, String node2, String edgeName, String value) { String fileContents = FlowGraphConfigurationKeys.FLOW_EDGE_SOURCE_KEY + "=" + node1 + "\n" + FlowGraphConfigurationKeys.FLOW_EDGE_DESTINATION_KEY + "=" + node2 + "\n" + FlowGraphConfigurationKeys.FLOW_EDGE_NAME_KEY + "=" + edgeName + "\n" + FlowGraphConfigurationKeys.FLOW_EDGE_IS_ACTIVE_KEY + "=true\n" + FlowGraphConfigurationKeys.FLOW_EDGE_TEMPLATE_DIR_URI_KEY + "=FS:///flowEdgeTemplate\n" + FlowGraphConfigurationKeys.FLOW_EDGE_SPEC_EXECUTORS_KEY + "=testExecutor1,testExecutor2\n" + "key1=" + value + "\n"; return fileContents; } private void testIfEdgeSuccessfullyAdded(String node1, String node2, String edgeName, String value) throws ExecutionException, InterruptedException { Set<FlowEdge> edgeSet = this.flowGraph.getEdges(node1); Assert.assertEquals(edgeSet.size(), 1); FlowEdge flowEdge = edgeSet.iterator().next(); Assert.assertEquals(flowEdge.getId(), Joiner.on(":").join(node1, node2, edgeName)); Assert.assertEquals(flowEdge.getSrc(), node1); Assert.assertEquals(flowEdge.getDest(), node2); Assert.assertEquals(flowEdge.getExecutors().get(0).getConfig().get().getString("specStore.fs.dir"), "/tmp1"); 
Assert.assertEquals(flowEdge.getExecutors().get(0).getConfig().get().getString("specExecInstance.capabilities"), "s1:d1"); Assert.assertEquals(flowEdge.getExecutors().get(0).getClass().getSimpleName(), "InMemorySpecExecutor"); Assert.assertEquals(flowEdge.getExecutors().get(1).getConfig().get().getString("specStore.fs.dir"), "/tmp2"); Assert.assertEquals(flowEdge.getExecutors().get(1).getConfig().get().getString("specExecInstance.capabilities"), "s2:d2"); Assert.assertEquals(flowEdge.getExecutors().get(1).getClass().getSimpleName(), "InMemorySpecExecutor"); Assert.assertEquals(flowEdge.getConfig().getString("key1"), value); } private String formNodeFilePath(String groupDir, String fileName) { return this.flowGraphDir.getName() + SystemUtils.FILE_SEPARATOR + groupDir + SystemUtils.FILE_SEPARATOR + fileName; } private String formEdgeFilePath(String parentDir, String groupDir, String fileName) { return this.flowGraphDir.getName() + SystemUtils.FILE_SEPARATOR + parentDir + SystemUtils.FILE_SEPARATOR + groupDir + SystemUtils.FILE_SEPARATOR + fileName; } private void cleanUpDir(String dir) { File specStoreDir = new File(dir); // cleanup is flaky on Travis, so retry a few times and then suppress the error if unsuccessful for (int i = 0; i < 5; i++) { try { if (specStoreDir.exists()) { FileUtils.deleteDirectory(specStoreDir); } // if delete succeeded then break out of loop break; } catch (IOException e) { logger.warn("Cleanup delete directory failed for directory: " + dir, e); } } } }
/*=========================================================================
 * Copyright (c) 2002-2014 Pivotal Software, Inc. All Rights Reserved.
 * This product is protected by U.S. and international copyright
 * and intellectual property laws. Pivotal products are covered by
 * more patents listed at http://www.pivotal.io/patents.
 *=========================================================================
 */
package com.gemstone.gemfire.cache.query;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

import org.apache.logging.log4j.Logger;

import com.gemstone.gemfire.SystemFailure;
import com.gemstone.gemfire.cache.CacheCallback;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import com.gemstone.gemfire.internal.logging.LogService;
import com.gemstone.gemfire.internal.logging.log4j.LocalizedMessage;

/**
 * The factory class for the CqAttributes instance. This provides the CqListener
 * setter methods. This class maintains state for and creates new instances of the
 * CqAttributes interface for new CqQuery instances.
 * If you create a factory with the default constructor, then the factory is set
 * up to create attributes with all default settings. You can also create a factory
 * by providing a <code>CqAttributes</code>, which will set up the new factory with
 * the settings provided in that attributes instance.
 *
 * <p>Once a <code>CqAttributes</code> is created, it can only be modified
 * after it has been used to create a <code>CqQuery</code>.
 *
 * @see CqAttributes
 *
 * @author Anil
 * @since 5.5
 */
public class CqAttributesFactory {
  private static final Logger logger = LogService.getLogger();

  /* Handle for CqAttributes. */
  private final CqAttributesImpl cqAttributes = new CqAttributesImpl();

  /**
   * Creates a new instance of AttributesFactory ready to create a
   * <code>CqAttributes</code> with default settings.
   */
  public CqAttributesFactory() {
  }

  /**
   * Creates a new instance of CqAttributesFactory ready to create a
   * <code>CqAttributes</code> with the same settings as those in the
   * specified <code>CqAttributes</code>.
   *
   * @param cqAttributes
   *          the <code>CqAttributes</code> used to initialize this
   *          AttributesFactory
   */
  public CqAttributesFactory(CqAttributes cqAttributes) {
    synchronized (this.cqAttributes) {
      this.cqAttributes.cqListeners = new ArrayList(Arrays.asList(cqAttributes.getCqListeners()));
    }
  }

  /**
   * Adds a CQ listener to the end of the list of cq listeners on this factory.
   * @param cqListener the CqListener to add to the factory.
   * @throws IllegalArgumentException if <code>cqListener</code> is null
   */
  public void addCqListener(CqListener cqListener) {
    if (cqListener == null) {
      throw new IllegalArgumentException(LocalizedStrings.CqAttributesFactory_ADDCQLISTENER_PARAMETER_WAS_NULL.toLocalizedString());
    }
    synchronized (this.cqAttributes) {
      this.cqAttributes.addCqListener(cqListener);
    }
  }

  /**
   * Removes all Cq listeners and then adds each listener in the specified array.
   * @param cqListeners a possibly null or empty array of listeners to add to this
   * factory.
   * @throws IllegalArgumentException if the <code>cqListeners</code> array has a
   * null element
   */
  public void initCqListeners(CqListener[] cqListeners) {
    synchronized (this.cqAttributes) {
      if (cqListeners == null || cqListeners.length == 0) {
        this.cqAttributes.cqListeners = null;
      }
      else {
        List nl = Arrays.asList(cqListeners);
        if (nl.contains(null)) {
          throw new IllegalArgumentException(LocalizedStrings.CqAttributesFactory_INITCQLISTENERS_PARAMETER_HAD_A_NULL_ELEMENT.toLocalizedString());
        }
        this.cqAttributes.cqListeners = new ArrayList(nl);
      }
    }
  }

  /**
   * Creates a <code>CqAttributes</code> with the current settings.
   * @return the newly created <code>CqAttributes</code>
   */
  public CqAttributes create() {
    return (CqAttributes)this.cqAttributes.clone();
  }

  protected static class CqAttributesImpl implements CqAttributes, CqAttributesMutator, Cloneable, Serializable {
    private static final long serialVersionUID = -959395592883099100L;

    // Listener list; null means "no listeners". Guarded by clSync.
    ArrayList cqListeners = null;
    // NOTE(review): this flag is never read or written here — presumably kept for
    // serialization compatibility; confirm before removing.
    boolean dataPolicyHasBeenSet = false;

    private static final CqListener[] EMPTY_LISTENERS = new CqListener[0];

    /**
     * Used to synchronize access to cqListeners
     */
    private final Object clSync = new Object();

    /**
     * Returns the CqListeners set with the CQ
     * @return CqListener[]
     */
    public CqListener[] getCqListeners() {
      if (this.cqListeners == null){
        return CqAttributesImpl.EMPTY_LISTENERS;
      }
      CqListener[] result = null;
      synchronized(this.clSync){
        result = new CqListener[cqListeners.size()];
        cqListeners.toArray(result);
      }
      return result;
    }

    /**
     * Returns the CqListener set with the CQ
     * @return CqListener
     */
    public CqListener getCqListener() {
      ArrayList listeners = this.cqListeners;
      if (listeners == null) {
        return null;
      }
      synchronized (this.clSync) {
        if (listeners.size() == 0) {
          return null;
        }
        if (listeners.size() == 1) {
          // BUG FIX: use the snapshot captured above instead of re-reading
          // this.cqListeners, which a concurrent initCqListeners(null) could have
          // set to null between the snapshot and this read (NPE).
          return (CqListener)listeners.get(0);
        }
      }
      throw new IllegalStateException(LocalizedStrings.CqAttributesFactory_MORE_THAN_ONE_CQLISTENER_EXISTS.toLocalizedString());
    }

    @Override
    public Object clone() {
      try {
        return super.clone();
      }
      catch (CloneNotSupportedException e) {
        throw new InternalError(LocalizedStrings.CqAttributesFactory_CLONENOTSUPPORTEDEXCEPTION_THROWN_IN_CLASS_THAT_IMPLEMENTS_CLONEABLE.toLocalizedString());
      }
    }

    /**
     * Adds a Cqlistener to the end of the list of Cqlisteners on this CqQuery.
     * @param cql the user defined cq listener to add to the CqQuery.
     * @throws IllegalArgumentException if <code>cql</code> is null
     */
    public void addCqListener(CqListener cql) {
      if (cql == null) {
        throw new IllegalArgumentException(LocalizedStrings.CqAttributesFactory_ADDCQLISTENER_PARAMETER_WAS_NULL.toLocalizedString());
      }
      synchronized (this.clSync) {
        ArrayList oldListeners = this.cqListeners;
        if (oldListeners == null || oldListeners.size() == 0) {
          ArrayList al = new ArrayList(1);
          al.add(cql);
          this.cqListeners = al;
        }
        else {
          if (!oldListeners.contains(cql)) {
            oldListeners.add(cql);
          }
        }
      }
    }

    /**
     * Removes all Cqlisteners, calling close on each of them, and then adds each
     * listener in the specified array.
     * @param addedListeners a possibly null or empty array of listeners to add to this CqQuery.
     * @throws IllegalArgumentException if the <code>addedListeners</code> array has a null element
     */
    public void initCqListeners(CqListener[] addedListeners) {
      ArrayList oldListeners = null;
      synchronized (this.clSync) {
        oldListeners = this.cqListeners;
        if (addedListeners == null || addedListeners.length == 0) {
          this.cqListeners = null;
        }
        else { // we have some listeners to add
          List nl = Arrays.asList(addedListeners);
          if (nl.contains(null)) {
            throw new IllegalArgumentException(LocalizedStrings.CqAttributesFactory_INITCQLISTENERS_PARAMETER_HAD_A_NULL_ELEMENT.toLocalizedString());
          }
          this.cqListeners = new ArrayList(nl);
        }
      }

      // Close the displaced listeners outside the lock; listener close() is alien code.
      if (oldListeners != null) {
        CqListener cql = null;
        for (Iterator iter = oldListeners.iterator(); iter.hasNext();) {
          try {
            cql = (CqListener)iter.next();
            cql.close();
            // Handle client side exceptions.
          }
          catch (Exception ex) {
            logger.warn(LocalizedMessage.create(LocalizedStrings.CqAttributesFactory_EXCEPTION_OCCURED_WHILE_CLOSING_CQ_LISTENER_ERROR_0, ex.getLocalizedMessage()));
            if (logger.isDebugEnabled()) {
              logger.debug(ex.getMessage(), ex);
            }
          }
          catch (VirtualMachineError err) {
            SystemFailure.initiateFailure(err);
            // If this ever returns, rethrow the error.  We're poisoned
            // now, so don't let this thread continue.
            throw err;
          }
          catch (Throwable t) {
            // Whenever you catch Error or Throwable, you must also
            // catch VirtualMachineError (see above).  However, there is
            // _still_ a possibility that you are dealing with a cascading
            // error condition, so you also need to check to see if the JVM
            // is still usable:
            SystemFailure.checkFailure();
            logger.warn(LocalizedMessage.create(LocalizedStrings.CqAttributesFactory_RUNTIME_EXCEPTION_OCCURED_WHILE_CLOSING_CQ_LISTENER_ERROR_0, t.getLocalizedMessage()));
            if (logger.isDebugEnabled()) {
              logger.debug(t.getMessage(), t);
            }
          }
        }
      }
    }

    /**
     * Removes a Cqlistener from the list of Cqlisteners on this CqQuery.
     * Does nothing if the specified listener has not been added.
     * If the specified listener has been added then {@link CacheCallback#close()} will
     * be called on it; otherwise does nothing.
     * @param cql the Cqlistener to remove from the CqQuery.
     * @throws IllegalArgumentException if <code>cql</code> is null
     */
    public void removeCqListener(CqListener cql) {
      if (cql == null) {
        throw new IllegalArgumentException(LocalizedStrings.CqAttributesFactory_REMOVECQLISTENER_PARAMETER_WAS_NULL.toLocalizedString());
      }
      synchronized (this.clSync) {
        ArrayList oldListeners = this.cqListeners;
        if (oldListeners != null) {
          if (oldListeners.remove(cql)) {
            if (oldListeners.isEmpty()) {
              this.cqListeners = null;
            }
            try {
              cql.close();
              // Handle client side exceptions.
            }
            catch (Exception ex) {
              logger.warn(LocalizedMessage.create(LocalizedStrings.CqAttributesFactory_EXCEPTION_CLOSING_CQ_LISTENER_ERROR_0, ex.getLocalizedMessage()));
              if (logger.isDebugEnabled()) {
                logger.debug(ex.getMessage(), ex);
              }
            }
            catch (VirtualMachineError err) {
              SystemFailure.initiateFailure(err);
              // If this ever returns, rethrow the error.  We're poisoned
              // now, so don't let this thread continue.
              throw err;
            }
            catch (Throwable t) {
              // Whenever you catch Error or Throwable, you must also
              // catch VirtualMachineError (see above).  However, there is
              // _still_ a possibility that you are dealing with a cascading
              // error condition, so you also need to check to see if the JVM
              // is still usable:
              SystemFailure.checkFailure();
              logger.warn(LocalizedMessage.create(LocalizedStrings.CqAttributesFactory_RUNTIME_EXCEPTION_OCCURED_CLOSING_CQ_LISTENER_ERROR_0, t.getLocalizedMessage()));
              if (logger.isDebugEnabled()) {
                logger.debug(t.getMessage(), t);
              }
            }
          }
        }
      }
    }
  }
}
/******************************************************************************* * Caleydo - Visualization for Molecular Biology - http://caleydo.org * Copyright (c) The Caleydo Team. All rights reserved. * Licensed under the new BSD license, available at http://caleydo.org/license ******************************************************************************/ /** * */ package org.caleydo.core.data.perspective.table; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlTransient; import javax.xml.bind.annotation.XmlType; import org.caleydo.core.data.collection.EDataType; import org.caleydo.core.data.collection.table.Table; import org.caleydo.core.data.datadomain.ATableBasedDataDomain; import org.caleydo.core.data.datadomain.DataDomainManager; import org.caleydo.core.data.perspective.variable.Perspective; import org.caleydo.core.data.perspective.variable.PerspectiveInitializationData; import org.caleydo.core.data.virtualarray.VirtualArray; import org.caleydo.core.data.virtualarray.group.Group; import org.caleydo.core.data.virtualarray.group.GroupList; import org.caleydo.core.id.IDCategory; import org.caleydo.core.id.IDCreator; import org.caleydo.core.id.IDType; import org.caleydo.core.util.base.IDefaultLabelHolder; import org.caleydo.core.util.base.IUniqueObject; /** * <p> * A TablePerspective holds all the "rules" and properties on how the data in the underlying {@link Table} should be * accessed. It does so by holding references to one {@link Perspective} and one {@link Perspective}, who define things * like order, groups, and hierarchical relationships for either the dimensions or the records of a Table. * </p> * <p> * While the perspectives are only defined for either the records or the dimensions, and thereby cannot reference * specific cells (and consequently no data), the TablePerspective defines a concrete subset of the data. 
* </p> * <p> * This allows to calculate statistics (see {@link TablePerspectiveStatistics}) for a TablePerspective, thereby * providing things like histograms or averages. * </p> * <p> * A TablePerspective should be created/accessed by using * {@link ATableBasedDataDomain#getTablePerspective(String, String)}, where the Strings are the IDs of the perspectives * that define the TablePerspective. The dataDomain registers the tablePerspective for those perspective and provides * other instances which need a TablePerspective for the same combination of perspectives with the same instance of the * TablePerspective, thereby avoiding double-calculation of derived meta-data (which can be both, computationally and * storage-wise expensive) * </p> * <p> * The tablePerspectives are identified by a the {@link #tablePerspectiveKey}, which is created as a function of the * identifiers of the two perspectives. * </p> * <p> * Data containers can be hierarchically created based on {@link GroupList}s of one of the {@link Perspective}s using * the {@link #getRecordSubTablePerspectives()} and {@link #getDimensionSubTablePerspectives()}. The resulting * <code>TablePerspective</code>s have the {@link #recordGroup}, resp. the #dimensionGroup set, which are otherwise * null. 
* </p> * * @author Alexander Lex */ @XmlType @XmlRootElement public class TablePerspective implements IDefaultLabelHolder, IUniqueObject { public static final IDCategory DATA_CONTAINER = IDCategory.registerInternalCategory("DATA_CONTAINER"); public static final IDType DATA_CONTAINER_IDTYPE = IDType.registerInternalType("DataContainers", DATA_CONTAINER, EDataType.INTEGER); /** The unique id of the data container */ @XmlTransient private final int id = IDCreator.createVMUniqueID(TablePerspective.class); /** The key, which is created by using a function of the perspective IDs */ private String tablePerspectiveKey; /** The data domain use in this data container */ protected ATableBasedDataDomain dataDomain; @XmlElement private String dataDomainID; @XmlElement private String recordPerspectiveID; /** * The recordPerspective defines the properties of the records (occurrence, order, groups, relationships) */ @XmlTransient protected Perspective recordPerspective; @XmlElement private String dimensionPerspectiveID; /** Same as {@link #recordPerspective} for dimensions */ @XmlTransient protected Perspective dimensionPerspective; /** A human-readable label */ protected String label; /** * Flag telling whether the set label is a default (true) and thereby should probably not be displayed or whether * the label is worth displaying */ @XmlElement private boolean isDefaultLabel = true; /** * Flag determining whether this data container is private to a certain view. That means that other views typically * should not use this data container. */ @XmlElement protected boolean isPrivate; /** * A group containing all elements of the {@link #recordPerspective}'s virtual array of this data container. 
This is
 * only set when the <code>TablePerspective</code> is a sub-container of another <code>TablePerspective</code> which
 */
	protected Group recordGroup = null;

	/**
	 * Same as {@link #recordGroup} for dimensions
	 */
	protected Group dimensionGroup = null;

	/**
	 * Object holding respectively calculating all forms of (statistical) meta-data for this container
	 */
	@XmlTransient
	protected TablePerspectiveStatistics tablePerspectiveStatistics = new TablePerspectiveStatistics(this);

	/**
	 * The parent table perspective that has created this perspective as a child, e.g., using
	 * {@link #getDimensionSubTablePerspectives()} or {@link #getRecordSubTablePerspectives()}.
	 */
	protected TablePerspective parentTablePerspective;

	/**
	 * Empty constructor, nothing initialized. Presumably required by JAXB (the class carries {@code @XmlTransient}
	 * members); callers must restore state afterwards, e.g. via {@link #postDesirialize()} — TODO confirm.
	 */
	public TablePerspective() {
	}

	/**
	 * Fully initializing constructor; mirrors the data domain and both perspectives into their serializable ID fields
	 * and derives the {@link #tablePerspectiveKey}.
	 *
	 * @param dataDomain
	 *            the data domain this container belongs to
	 * @param recordPerspective
	 *            the perspective on the records
	 * @param dimensionPerspective
	 *            the perspective on the dimensions
	 */
	public TablePerspective(ATableBasedDataDomain dataDomain, Perspective recordPerspective,
			Perspective dimensionPerspective) {
		this.dataDomain = dataDomain;
		this.dataDomainID = dataDomain.getDataDomainID();
		this.recordPerspective = recordPerspective;
		this.recordPerspectiveID = recordPerspective.getPerspectiveID();
		this.dimensionPerspective = dimensionPerspective;
		this.dimensionPerspectiveID = dimensionPerspective.getPerspectiveID();
		createKey();
	}

	/**
	 * @return the id, see {@link #id}
	 */
	@Override
	public int getID() {
		return id;
	}

	/**
	 * @return the tablePerspectiveKey, see {@link #tablePerspectiveKey}
	 */
	public String getTablePerspectiveKey() {
		return tablePerspectiveKey;
	}

	@XmlTransient
	public ATableBasedDataDomain getDataDomain() {
		return dataDomain;
	}

	/**
	 * Setter for {@link #dataDomain}; keeps the serializable {@link #dataDomainID} in sync.
	 *
	 * @param dataDomain
	 *            setter, see {@link #dataDomain}
	 */
	public void setDataDomain(ATableBasedDataDomain dataDomain) {
		this.dataDomainID = dataDomain.getDataDomainID();
		this.dataDomain = dataDomain;
	}

	/**
	 * Convenience wrapper for {@link #getPerspective(IDCategory)}. Note that idType does only have to be of the same
	 * category, not of the same type.
	 *
	 * @param idType
	 *            only its {@link IDCategory} is used for the lookup
	 * @return the perspective whose ID type belongs to the same category as {@code idType}
	 */
	public Perspective getPerspective(IDType idType) {
		return getPerspective(idType.getIDCategory());
	}

	/**
	 * Returns the perspective matching the idCategory
	 *
	 * @param idCategory
	 *            the category to resolve
	 * @return either {@link #recordPerspective} or {@link #dimensionPerspective}, whichever matches
	 * @throws IllegalStateException
	 *             if idCategory is not registered with this perspective
	 */
	public Perspective getPerspective(IDCategory idCategory) {
		if (recordPerspective.getIdType().getIDCategory().equals(idCategory)) {
			return recordPerspective;
		} else if (dimensionPerspective.getIdType().getIDCategory().equals(idCategory)) {
			return dimensionPerspective;
		} else {
			throw new IllegalStateException("ID Category " + idCategory + " not available for this perspective.");
		}
	}

	/**
	 * Returns the perspective "opposite" of the one associated with this ID Type. This is a convenience wrapper to
	 * {@link #getOppositePerspective(IDCategory)}. As a consequence only the IDCategory of the provided type matters,
	 * not the actual type.
	 *
	 * @param idType
	 *            only its {@link IDCategory} is used
	 * @return the perspective of the other category
	 */
	public Perspective getOppositePerspective(IDType idType) {
		return getOppositePerspective(idType.getIDCategory());
	}

	/**
	 * Returns the perspective "opposite" of the one associated with this ID Category.
	 *
	 * @param idCategory
	 *            the category whose counterpart perspective is requested (the original doc tagged this as
	 *            {@code idType}, which was a doc error)
	 * @return the perspective of the other category
	 */
	public Perspective getOppositePerspective(IDCategory idCategory) {
		// the data domain resolves which of its two ID types does NOT belong to this category
		IDType oppositeType = dataDomain.getOppositeIDType(idCategory);
		return getPerspective(oppositeType);
	}

	/**
	 * @return the recordPerspective, see {@link #recordPerspective}
	 */
	@XmlTransient
	public Perspective getRecordPerspective() {
		return recordPerspective;
	}

	/**
	 * Setter for {@link #recordPerspective}; keeps {@link #recordPerspectiveID} in sync and re-derives the
	 * {@link #tablePerspectiveKey}.
	 *
	 * @param recordPerspective
	 *            setter, see {@link #recordPerspective}
	 */
	public void setRecordPerspective(Perspective recordPerspective) {
		// if (this.recordPerspective != null)
		// throw new IllegalStateException("Illegal to change perspectives of TablePerspectives.");
		this.recordPerspective = recordPerspective;
		this.recordPerspectiveID = recordPerspective.getPerspectiveID();
		createKey();
	}

	/**
	 * @return the dimensionPerspective, see {@link #dimensionPerspective}
	 */
	@XmlTransient
	public Perspective getDimensionPerspective() {
		return dimensionPerspective;
	}

	/**
	 * Setter for {@link #dimensionPerspective}; keeps {@link #dimensionPerspectiveID} in sync and re-derives the
	 * {@link #tablePerspectiveKey}.
	 *
	 * @param dimensionPerspective
	 *            setter, see {@link #dimensionPerspective}
	 */
	public void setDimensionPerspective(Perspective dimensionPerspective) {
		// if (this.dimensionPerspective != null)
		// throw new IllegalStateException("Illegal to change perspectives of TablePerspectives.");
		this.dimensionPerspective = dimensionPerspective;
		dimensionPerspectiveID = dimensionPerspective.getPerspectiveID();
		createKey();
	}

	/**
	 * Checks whether the specified container id matches to the record perspective in this {@link TablePerspective}
	 *
	 * @param recordPerspectiveID
	 *            the id to compare against
	 * @return true if the specified id equals the id of the perspective in this container
	 */
	public boolean hasRecordPerspective(String recordPerspectiveID) {
		return recordPerspective.getPerspectiveID().equals(recordPerspectiveID);
	}

	/**
	 * Same as {@link #hasRecordPerspective(String)} for dimensions (the original doc linked a non-existent
	 * {@code hasAVariablePerspective(String)})
	 *
	 * @param dimensionPerspectiveID
	 *            the id to compare against
	 * @return true if the specified id equals the id of the perspective in this container
	 */
	public boolean hasDimensionPerspective(String dimensionPerspectiveID) {
		return dimensionPerspective.getPerspectiveID().equals(dimensionPerspectiveID);
	}

	/**
	 * @return the statistics, see {@link #tablePerspectiveStatistics}
	 */
	public TablePerspectiveStatistics getContainerStatistics() {
		return tablePerspectiveStatistics;
	}

	/**
	 * invalidate the statistics, e.g. if the underlying data have changed (implemented by simply allocating a fresh
	 * {@link TablePerspectiveStatistics})
	 */
	public void invalidateContainerStatistics() {
		tablePerspectiveStatistics = new TablePerspectiveStatistics(this);
	}

	/**
	 * Returns the size of the virtual array in the record perspective, i.e. the number of records
	 */
	public int getNrRecords() {
		return recordPerspective.getVirtualArray().size();
	}

	/** Same as {@link #getNrRecords()} for dimensions */
	public int getNrDimensions() {
		return dimensionPerspective.getVirtualArray().size();
	}

	/**
	 * Getter for {@link #label}, creates a default label if none was set. Note the lazy side effect: the generated
	 * default is cached in {@link #label}.
	 *
	 * @return the label, never null
	 */
	@Override
	public String getLabel() {
		if (label == null)
			label = dataDomain.getLabel() + " - " + recordPerspective.getLabel() + "/"
					+ dimensionPerspective.getLabel();
		return label;
	}

	/**
	 * @param label
	 *            setter, see {@link #label}
	 * @param isDefaultLabel
	 *            whether the given label is a generated default, see {@link #isDefaultLabel}
	 */
	@Override
	public void setLabel(String label, boolean isDefaultLabel) {
		this.label = label;
		this.isDefaultLabel = isDefaultLabel;
	}

	/**
	 * @param isPrivate
	 *            setter, see {@link #isPrivate}
	 */
	public void setPrivate(boolean isPrivate) {
		this.isPrivate = isPrivate;
	}

	/**
	 * @return the isPrivate, see {@link #isPrivate}
	 */
	public boolean isPrivate() {
		return isPrivate;
	}

	/**
	 * @return the recordGroup, see {@link #recordGroup}
	 */
	public Group getRecordGroup() {
		return recordGroup;
	}

	/**
	 * Setter for {@link #recordGroup}. Side effect: replaces the group list of the record perspective's virtual array
	 * with a single-group list wrapping a copy of the given group. NOTE(review): {@link #setDimensionGroup(Group)} has
	 * no such side effect — confirm whether the asymmetry is intentional.
	 *
	 * @param recordGroup
	 *            setter, see {@link #recordGroup}
	 */
	public void setRecordGroup(Group recordGroup) {
		this.recordGroup = recordGroup;
		GroupList groupList = new GroupList();
		Group group = new Group(recordGroup);
		groupList.append(group);
		this.recordPerspective.getVirtualArray().setGroupList(groupList);
	}

	/**
	 * Convenience wrapper for {@link #getGroup(IDCategory)}
	 *
	 * @param idType
	 *            only its {@link IDCategory} is used
	 * @return the matching group
	 */
	public Group getGroup(IDType idType) {
		return getGroup(idType.getIDCategory());
	}

	/**
	 * Returns the group matching to the specified {@link IDCategory}
	 *
	 * @param idCategory
	 *            the category to resolve
	 * @return {@link #recordGroup} or {@link #dimensionGroup}; may be null if no group was set for that side
	 * @throws IllegalStateException
	 *             if the category matches neither perspective
	 */
	public Group getGroup(IDCategory idCategory) {
		if (idCategory.isOfCategory(recordPerspective.getIdType())) {
			return recordGroup;
		} else if (idCategory.isOfCategory(dimensionPerspective.getIdType())) {
			return dimensionGroup;
		}
		throw new IllegalStateException("No group for this category :" + idCategory);
	}

	/**
	 * @return the dimensionGroup, see {@link #dimensionGroup}
	 */
	public Group getDimensionGroup() {
		return dimensionGroup;
	}

	/**
	 * @param dimensionGroup
	 *            setter, see {@link #dimensionGroup}
	 */
	public void setDimensionGroup(Group dimensionGroup) {
		this.dimensionGroup = dimensionGroup;
	}

	/**
	 * Creates and returns one new {@link TablePerspective} for each group in the {@link Perspective}, where the new
	 * {@link Perspective} contains the elements of the group. The {@link Perspective} is the same as for this
	 * container.
	 *
	 * @return a new list of new {@link TablePerspective}s, or null if no group list is set. NOTE(review): contrary to
	 *         the original doc, a single-group list is NOT mapped to null — a one-element result is returned, with its
	 *         label forced to this container's {@link #label}.
	 */
	public List<TablePerspective> getRecordSubTablePerspectives() {
		List<TablePerspective> recordSubTablePerspectives = new ArrayList<TablePerspective>();
		VirtualArray recordVA = recordPerspective.getVirtualArray();
		if (recordVA.getGroupList() == null)
			return null;
		GroupList groupList = recordVA.getGroupList();
		groupList.updateGroupInfo();
		for (Group group : groupList) {
			// a lone default-labeled group gets a generic name instead of the auto-generated one
			if (groupList.size() == 1 && group.isLabelDefault())
				group.setLabel("Ungrouped", isLabelDefault());// group.setLabel(getLabel(), isLabelDefault());
			List<Integer> indices = recordVA.getIDsOfGroup(group.getGroupIndex());
			// NOTE: local deliberately shadows the field — this is the per-group sub-perspective
			Perspective recordPerspective = new Perspective(dataDomain, recordVA.getIdType());
			recordPerspective.setLabel(group.getLabel(), group.isLabelDefault());
			PerspectiveInitializationData data = new PerspectiveInitializationData();
			data.setData(indices);
			recordPerspective.init(data);
			TablePerspective subTablePerspective = new TablePerspective(dataDomain, recordPerspective,
					dimensionPerspective);
			subTablePerspective.setRecordGroup(group);
			subTablePerspective.setLabel(recordPerspective.getLabel(), recordPerspective.isLabelDefault());
			subTablePerspective.parentTablePerspective = this;
			recordSubTablePerspectives.add(subTablePerspective);
		}
		if (recordVA.getGroupList().size() == 1) {
			recordSubTablePerspectives.get(0).setLabel(label);
		}
		return recordSubTablePerspectives;
	}

	/**
	 * Creates and returns one new {@link TablePerspective} for each group in the {@link Perspective}, where the new
	 * {@link Perspective} contains the elements of the group. The {@link Perspective} is the same as for this
	 * container. Mirrors {@link #getRecordSubTablePerspectives()} for the dimension side.
	 *
	 * @return a new list of new {@link TablePerspective}s, or null if no group list is set. NOTE(review): as on the
	 *         record side, a single-group list yields a one-element result rather than null.
	 */
	public List<TablePerspective> getDimensionSubTablePerspectives() {
		List<TablePerspective> dimensionSubTablePerspectives = new ArrayList<TablePerspective>();
		VirtualArray dimensionVA = dimensionPerspective.getVirtualArray();
		if (dimensionVA.getGroupList() == null)
			return null;
		GroupList groupList = dimensionVA.getGroupList();
		groupList.updateGroupInfo();
		for (Group group : groupList) {
			// a lone default-labeled group gets a generic name instead of the auto-generated one
			if (groupList.size() == 1 && group.isLabelDefault())
				group.setLabel("Ungrouped", isLabelDefault());// group.setLabel(getLabel(), isLabelDefault());
			List<Integer> indices = dimensionVA.getIDsOfGroup(group.getGroupIndex());
			// NOTE: local deliberately shadows the field — this is the per-group sub-perspective
			Perspective dimensionPerspective = new Perspective(dataDomain, dimensionVA.getIdType());
			dimensionPerspective.setLabel(group.getLabel(), group.isLabelDefault());
			PerspectiveInitializationData data = new PerspectiveInitializationData();
			data.setData(indices);
			dimensionPerspective.init(data);
			TablePerspective subTablePerspective = new TablePerspective(dataDomain, recordPerspective,
					dimensionPerspective);
			subTablePerspective.setDimensionGroup(group);
			subTablePerspective.setLabel(group.getLabel(), group.isLabelDefault());
			subTablePerspective.parentTablePerspective = this;
			dimensionSubTablePerspectives.add(subTablePerspective);
		}
		if (dimensionVA.getGroupList().size() == 1) {
			dimensionSubTablePerspectives.get(0).setLabel(label);
		}
		return dimensionSubTablePerspectives;
	}

	// public void afterUnmarshal(Unmarshaller u, Object parent) {
	// this.dataDomain = (ATableBasedDataDomain) parent;
	// }

	/**
	 * Creates the {@link #tablePerspectiveKey} if both {@link #recordPerspective} and {@link #dimensionPerspective} are
	 * already initialized.
	 */
	private void createKey() {
		if (recordPerspective != null && dimensionPerspective != null)
			tablePerspectiveKey = createKey(recordPerspective.getPerspectiveID(),
					dimensionPerspective.getPerspectiveID());
	}

	/**
	 * Builds the composite key for a record/dimension perspective pair.
	 *
	 * @param recordPerspectiveID
	 *            id of the record perspective
	 * @param dimensionPerspectiveID
	 *            id of the dimension perspective
	 * @return the two ids joined by an underscore
	 */
	public static String createKey(String recordPerspectiveID, String dimensionPerspectiveID) {
		return recordPerspectiveID + "_" + dimensionPerspectiveID;
	}

	/**
	 * This should be called after the rest of the data, specifically the perspectives are successfully deserialized.
	 * Sets the perspectives based on the serialized perspective IDs.
	 * <p>
	 * NOTE(review): the method name contains a typo ("Desirialize") but is public API — renaming would break callers.
	 */
	public void postDesirialize() {
		dataDomain = (ATableBasedDataDomain) DataDomainManager.get().getDataDomainByID(dataDomainID);
		recordPerspective = dataDomain.getTable().getRecordPerspective(recordPerspectiveID);
		dimensionPerspective = dataDomain.getTable().getDimensionPerspective(dimensionPerspectiveID);
		createKey();
	}

	/**
	 * Setter for {@link #label}. Unlike {@link #setLabel(String, boolean)} this does not touch
	 * {@link #isDefaultLabel}.
	 */
	@Override
	public void setLabel(String label) {
		this.label = label;
	}

	@Override
	public String getProviderName() {
		return "Table Perspective";
	}

	@Override
	public boolean isLabelDefault() {
		return isDefaultLabel;
	}

	/**
	 * @return the parentTablePerspective, see {@link #parentTablePerspective}
	 */
	public TablePerspective getParentTablePerspective() {
		return parentTablePerspective;
	}

	@Override
	public String toString() {
		return "[D: " + dimensionPerspective.toString() + "; R: " + recordPerspective + "]";
	}
}
package org.cellularautomaton.space.builder; import static org.junit.Assert.*; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.cellularautomaton.cell.ICell; import org.cellularautomaton.optimization.AbstractOptimization; import org.cellularautomaton.optimization.Optimizable; import org.cellularautomaton.optimization.OptimizableTest; import org.cellularautomaton.optimization.Optimization; import org.cellularautomaton.optimization.step.OptimizationStep; import org.cellularautomaton.optimization.type.OptimizationType; import org.cellularautomaton.state.AbstractStateFactory; import org.cellularautomaton.state.IStateFactory; import org.cellularautomaton.util.Coords; import org.junit.Test; public class SpaceBuilderTest extends OptimizableTest<SpaceBuilder<String>> { @Override public Optimizable<SpaceBuilder<String>> getOptimizable() { return new SpaceBuilder<String>(); } @Override public Optimization<SpaceBuilder<String>> getRandomOptimization() { class Optimization extends AbstractOptimization<SpaceBuilder<String>> implements OptimizationStep<SpaceBuilder<String>>, OptimizationType<SpaceBuilder<String>> { } return new Optimization(); } @Test public void testIsolatedSpace1D() { // generate space IStateFactory<String> stateFactory = new AbstractStateFactory<String>() { public List<String> getPossibleStates() { return Arrays.asList(new String[] { "" }); } }; SpaceBuilder<String> builder = new SpaceBuilder<String>(); builder.setStateFactory(stateFactory).setMemorySize(1).createNewSpace() .addDimension(3, false); // get cells ICell<String> cell0 = builder.getSpaceOfCell().getOrigin(); ICell<String> cell1 = cell0.getNextCellOnDimension(0); ICell<String> cell2 = cell1.getNextCellOnDimension(0); // check cells exclusivity assertSame(cell0, cell0); assertNotSame(cell0, cell1); assertNotSame(cell0, cell2); assertNotSame(cell1, cell0); assertSame(cell1, cell1); assertNotSame(cell1, cell2); assertNotSame(cell2, cell0); assertNotSame(cell2, 
cell1); assertSame(cell2, cell2); // check no intruders { List<ICell<String>> list = new ArrayList<ICell<String>>(); list.add(cell0); list.add(cell1); list.add(cell2); assertTrue(list.containsAll(cell0.getAllCellsAround())); assertTrue(list.containsAll(cell1.getAllCellsAround())); assertTrue(list.containsAll(cell2.getAllCellsAround())); } // check cells links assertEquals(null, cell0.getPreviousCellOnDimension(0)); assertEquals(cell1, cell0.getNextCellOnDimension(0)); assertEquals(cell0, cell1.getPreviousCellOnDimension(0)); assertEquals(cell2, cell1.getNextCellOnDimension(0)); assertEquals(cell1, cell2.getPreviousCellOnDimension(0)); assertEquals(null, cell2.getNextCellOnDimension(0)); // check coords assertEquals(new Coords(0), cell0.getCoords()); assertEquals(new Coords(1), cell1.getCoords()); assertEquals(new Coords(2), cell2.getCoords()); } @Test public void testCyclicSpace1D() { // generate space IStateFactory<String> stateFactory = new AbstractStateFactory<String>() { public List<String> getPossibleStates() { return Arrays.asList(new String[] { "" }); } }; SpaceBuilder<String> builder = new SpaceBuilder<String>(); builder.setStateFactory(stateFactory).setMemorySize(1).createNewSpace() .addDimension(3); // get cells ICell<String> cell0 = builder.getSpaceOfCell().getOrigin(); ICell<String> cell1 = cell0.getNextCellOnDimension(0); ICell<String> cell2 = cell1.getNextCellOnDimension(0); // check cells exclusivity assertSame(cell0, cell0); assertNotSame(cell0, cell1); assertNotSame(cell0, cell2); assertNotSame(cell1, cell0); assertSame(cell1, cell1); assertNotSame(cell1, cell2); assertNotSame(cell2, cell0); assertNotSame(cell2, cell1); assertSame(cell2, cell2); // check no intruders { List<ICell<String>> list = new ArrayList<ICell<String>>(); list.add(cell0); list.add(cell1); list.add(cell2); assertTrue(list.containsAll(cell0.getAllCellsAround())); assertTrue(list.containsAll(cell1.getAllCellsAround())); assertTrue(list.containsAll(cell2.getAllCellsAround())); } 
// check cells links assertEquals(cell2, cell0.getPreviousCellOnDimension(0)); assertEquals(cell1, cell0.getNextCellOnDimension(0)); assertEquals(cell0, cell1.getPreviousCellOnDimension(0)); assertEquals(cell2, cell1.getNextCellOnDimension(0)); assertEquals(cell1, cell2.getPreviousCellOnDimension(0)); assertEquals(cell0, cell2.getNextCellOnDimension(0)); // check coords assertEquals(new Coords(0), cell0.getCoords()); assertEquals(new Coords(1), cell1.getCoords()); assertEquals(new Coords(2), cell2.getCoords()); } @Test public void testIsolatedSpace2D() { // generate space IStateFactory<String> stateFactory = new AbstractStateFactory<String>() { public List<String> getPossibleStates() { return Arrays.asList(new String[] { "" }); } }; SpaceBuilder<String> builder = new SpaceBuilder<String>(); builder.setStateFactory(stateFactory).setMemorySize(1).createNewSpace() .addDimension(3, false).addDimension(3, false); // get cells ICell<String> cell00 = builder.getSpaceOfCell().getOrigin(); ICell<String> cell01 = cell00.getNextCellOnDimension(0); ICell<String> cell02 = cell01.getNextCellOnDimension(0); ICell<String> cell10 = cell00.getNextCellOnDimension(1); ICell<String> cell11 = cell10.getNextCellOnDimension(0); ICell<String> cell12 = cell11.getNextCellOnDimension(0); ICell<String> cell20 = cell10.getNextCellOnDimension(1); ICell<String> cell21 = cell20.getNextCellOnDimension(0); ICell<String> cell22 = cell21.getNextCellOnDimension(0); // check cells exclusivity assertSame(cell00, cell00); assertNotSame(cell00, cell01); assertNotSame(cell00, cell02); assertNotSame(cell00, cell10); assertNotSame(cell00, cell11); assertNotSame(cell00, cell12); assertNotSame(cell00, cell20); assertNotSame(cell00, cell21); assertNotSame(cell00, cell22); assertNotSame(cell01, cell00); assertSame(cell01, cell01); assertNotSame(cell01, cell02); assertNotSame(cell01, cell10); assertNotSame(cell01, cell11); assertNotSame(cell01, cell12); assertNotSame(cell01, cell20); assertNotSame(cell01, cell21); 
assertNotSame(cell01, cell22); assertNotSame(cell02, cell00); assertNotSame(cell02, cell01); assertSame(cell02, cell02); assertNotSame(cell02, cell10); assertNotSame(cell02, cell11); assertNotSame(cell02, cell12); assertNotSame(cell02, cell20); assertNotSame(cell02, cell21); assertNotSame(cell02, cell22); assertNotSame(cell10, cell00); assertNotSame(cell10, cell01); assertNotSame(cell10, cell02); assertSame(cell10, cell10); assertNotSame(cell10, cell11); assertNotSame(cell10, cell12); assertNotSame(cell10, cell20); assertNotSame(cell10, cell21); assertNotSame(cell10, cell22); assertNotSame(cell11, cell00); assertNotSame(cell11, cell01); assertNotSame(cell11, cell02); assertNotSame(cell11, cell10); assertSame(cell11, cell11); assertNotSame(cell11, cell12); assertNotSame(cell11, cell20); assertNotSame(cell11, cell21); assertNotSame(cell11, cell22); assertNotSame(cell12, cell00); assertNotSame(cell12, cell01); assertNotSame(cell12, cell02); assertNotSame(cell12, cell10); assertNotSame(cell12, cell11); assertSame(cell12, cell12); assertNotSame(cell12, cell20); assertNotSame(cell12, cell21); assertNotSame(cell12, cell22); assertNotSame(cell20, cell00); assertNotSame(cell20, cell01); assertNotSame(cell20, cell02); assertNotSame(cell20, cell10); assertNotSame(cell20, cell11); assertNotSame(cell20, cell12); assertSame(cell20, cell20); assertNotSame(cell20, cell21); assertNotSame(cell20, cell22); assertNotSame(cell21, cell00); assertNotSame(cell21, cell01); assertNotSame(cell21, cell02); assertNotSame(cell21, cell10); assertNotSame(cell21, cell11); assertNotSame(cell21, cell12); assertNotSame(cell21, cell20); assertSame(cell21, cell21); assertNotSame(cell21, cell22); assertNotSame(cell22, cell00); assertNotSame(cell22, cell01); assertNotSame(cell22, cell02); assertNotSame(cell22, cell10); assertNotSame(cell22, cell11); assertNotSame(cell22, cell12); assertNotSame(cell22, cell20); assertNotSame(cell22, cell21); assertSame(cell22, cell22); // check no intruders { 
List<ICell<String>> list = new ArrayList<ICell<String>>(); list.add(cell00); list.add(cell01); list.add(cell02); list.add(cell10); list.add(cell11); list.add(cell12); list.add(cell20); list.add(cell21); list.add(cell22); assertTrue(list.containsAll(cell00.getAllCellsAround())); assertTrue(list.containsAll(cell01.getAllCellsAround())); assertTrue(list.containsAll(cell02.getAllCellsAround())); assertTrue(list.containsAll(cell10.getAllCellsAround())); assertTrue(list.containsAll(cell11.getAllCellsAround())); assertTrue(list.containsAll(cell12.getAllCellsAround())); assertTrue(list.containsAll(cell20.getAllCellsAround())); assertTrue(list.containsAll(cell21.getAllCellsAround())); assertTrue(list.containsAll(cell22.getAllCellsAround())); } // check cells links assertEquals(null, cell00.getPreviousCellOnDimension(0)); assertEquals(cell01, cell00.getNextCellOnDimension(0)); assertEquals(null, cell00.getPreviousCellOnDimension(1)); assertEquals(cell10, cell00.getNextCellOnDimension(1)); assertEquals(cell00, cell01.getPreviousCellOnDimension(0)); assertEquals(cell02, cell01.getNextCellOnDimension(0)); assertEquals(null, cell01.getPreviousCellOnDimension(1)); assertEquals(cell11, cell01.getNextCellOnDimension(1)); assertEquals(cell01, cell02.getPreviousCellOnDimension(0)); assertEquals(null, cell02.getNextCellOnDimension(0)); assertEquals(null, cell02.getPreviousCellOnDimension(1)); assertEquals(cell12, cell02.getNextCellOnDimension(1)); assertEquals(null, cell10.getPreviousCellOnDimension(0)); assertEquals(cell11, cell10.getNextCellOnDimension(0)); assertEquals(cell00, cell10.getPreviousCellOnDimension(1)); assertEquals(cell20, cell10.getNextCellOnDimension(1)); assertEquals(cell10, cell11.getPreviousCellOnDimension(0)); assertEquals(cell12, cell11.getNextCellOnDimension(0)); assertEquals(cell01, cell11.getPreviousCellOnDimension(1)); assertEquals(cell21, cell11.getNextCellOnDimension(1)); assertEquals(cell11, cell12.getPreviousCellOnDimension(0)); assertEquals(null, 
cell12.getNextCellOnDimension(0)); assertEquals(cell02, cell12.getPreviousCellOnDimension(1)); assertEquals(cell22, cell12.getNextCellOnDimension(1)); assertEquals(null, cell20.getPreviousCellOnDimension(0)); assertEquals(cell21, cell20.getNextCellOnDimension(0)); assertEquals(cell10, cell20.getPreviousCellOnDimension(1)); assertEquals(null, cell20.getNextCellOnDimension(1)); assertEquals(cell20, cell21.getPreviousCellOnDimension(0)); assertEquals(cell22, cell21.getNextCellOnDimension(0)); assertEquals(cell11, cell21.getPreviousCellOnDimension(1)); assertEquals(null, cell21.getNextCellOnDimension(1)); assertEquals(cell21, cell22.getPreviousCellOnDimension(0)); assertEquals(null, cell22.getNextCellOnDimension(0)); assertEquals(cell12, cell22.getPreviousCellOnDimension(1)); assertEquals(null, cell22.getNextCellOnDimension(1)); // check coords assertEquals(new Coords(0, 0), cell00.getCoords()); assertEquals(new Coords(1, 0), cell01.getCoords()); assertEquals(new Coords(2, 0), cell02.getCoords()); assertEquals(new Coords(0, 1), cell10.getCoords()); assertEquals(new Coords(1, 1), cell11.getCoords()); assertEquals(new Coords(2, 1), cell12.getCoords()); assertEquals(new Coords(0, 2), cell20.getCoords()); assertEquals(new Coords(1, 2), cell21.getCoords()); assertEquals(new Coords(2, 2), cell22.getCoords()); } @Test public void testCyclicSpace2D() { // generate space IStateFactory<String> stateFactory = new AbstractStateFactory<String>() { public List<String> getPossibleStates() { return Arrays.asList(new String[] { "" }); } }; SpaceBuilder<String> builder = new SpaceBuilder<String>(); builder.setStateFactory(stateFactory).setMemorySize(1).createNewSpace() .addDimension(3).addDimension(3); // get cells ICell<String> cell00 = builder.getSpaceOfCell().getOrigin(); ICell<String> cell01 = cell00.getNextCellOnDimension(0); ICell<String> cell02 = cell01.getNextCellOnDimension(0); ICell<String> cell10 = cell00.getNextCellOnDimension(1); ICell<String> cell11 = 
cell10.getNextCellOnDimension(0); ICell<String> cell12 = cell11.getNextCellOnDimension(0); ICell<String> cell20 = cell10.getNextCellOnDimension(1); ICell<String> cell21 = cell20.getNextCellOnDimension(0); ICell<String> cell22 = cell21.getNextCellOnDimension(0); // check cells exclusivity assertSame(cell00, cell00); assertNotSame(cell00, cell01); assertNotSame(cell00, cell02); assertNotSame(cell00, cell10); assertNotSame(cell00, cell11); assertNotSame(cell00, cell12); assertNotSame(cell00, cell20); assertNotSame(cell00, cell21); assertNotSame(cell00, cell22); assertNotSame(cell01, cell00); assertSame(cell01, cell01); assertNotSame(cell01, cell02); assertNotSame(cell01, cell10); assertNotSame(cell01, cell11); assertNotSame(cell01, cell12); assertNotSame(cell01, cell20); assertNotSame(cell01, cell21); assertNotSame(cell01, cell22); assertNotSame(cell02, cell00); assertNotSame(cell02, cell01); assertSame(cell02, cell02); assertNotSame(cell02, cell10); assertNotSame(cell02, cell11); assertNotSame(cell02, cell12); assertNotSame(cell02, cell20); assertNotSame(cell02, cell21); assertNotSame(cell02, cell22); assertNotSame(cell10, cell00); assertNotSame(cell10, cell01); assertNotSame(cell10, cell02); assertSame(cell10, cell10); assertNotSame(cell10, cell11); assertNotSame(cell10, cell12); assertNotSame(cell10, cell20); assertNotSame(cell10, cell21); assertNotSame(cell10, cell22); assertNotSame(cell11, cell00); assertNotSame(cell11, cell01); assertNotSame(cell11, cell02); assertNotSame(cell11, cell10); assertSame(cell11, cell11); assertNotSame(cell11, cell12); assertNotSame(cell11, cell20); assertNotSame(cell11, cell21); assertNotSame(cell11, cell22); assertNotSame(cell12, cell00); assertNotSame(cell12, cell01); assertNotSame(cell12, cell02); assertNotSame(cell12, cell10); assertNotSame(cell12, cell11); assertSame(cell12, cell12); assertNotSame(cell12, cell20); assertNotSame(cell12, cell21); assertNotSame(cell12, cell22); assertNotSame(cell20, cell00); assertNotSame(cell20, 
cell01); assertNotSame(cell20, cell02); assertNotSame(cell20, cell10); assertNotSame(cell20, cell11); assertNotSame(cell20, cell12); assertSame(cell20, cell20); assertNotSame(cell20, cell21); assertNotSame(cell20, cell22); assertNotSame(cell21, cell00); assertNotSame(cell21, cell01); assertNotSame(cell21, cell02); assertNotSame(cell21, cell10); assertNotSame(cell21, cell11); assertNotSame(cell21, cell12); assertNotSame(cell21, cell20); assertSame(cell21, cell21); assertNotSame(cell21, cell22); assertNotSame(cell22, cell00); assertNotSame(cell22, cell01); assertNotSame(cell22, cell02); assertNotSame(cell22, cell10); assertNotSame(cell22, cell11); assertNotSame(cell22, cell12); assertNotSame(cell22, cell20); assertNotSame(cell22, cell21); assertSame(cell22, cell22); // check no intruders { List<ICell<String>> list = new ArrayList<ICell<String>>(); list.add(cell00); list.add(cell01); list.add(cell02); list.add(cell10); list.add(cell11); list.add(cell12); list.add(cell20); list.add(cell21); list.add(cell22); assertTrue(list.containsAll(cell00.getAllCellsAround())); assertTrue(list.containsAll(cell01.getAllCellsAround())); assertTrue(list.containsAll(cell02.getAllCellsAround())); assertTrue(list.containsAll(cell10.getAllCellsAround())); assertTrue(list.containsAll(cell11.getAllCellsAround())); assertTrue(list.containsAll(cell12.getAllCellsAround())); assertTrue(list.containsAll(cell20.getAllCellsAround())); assertTrue(list.containsAll(cell21.getAllCellsAround())); assertTrue(list.containsAll(cell22.getAllCellsAround())); } // check cells links assertEquals(cell02, cell00.getPreviousCellOnDimension(0)); assertEquals(cell01, cell00.getNextCellOnDimension(0)); assertEquals(cell20, cell00.getPreviousCellOnDimension(1)); assertEquals(cell10, cell00.getNextCellOnDimension(1)); assertEquals(cell00, cell01.getPreviousCellOnDimension(0)); assertEquals(cell02, cell01.getNextCellOnDimension(0)); assertEquals(cell21, cell01.getPreviousCellOnDimension(1)); assertEquals(cell11, 
cell01.getNextCellOnDimension(1)); assertEquals(cell01, cell02.getPreviousCellOnDimension(0)); assertEquals(cell00, cell02.getNextCellOnDimension(0)); assertEquals(cell22, cell02.getPreviousCellOnDimension(1)); assertEquals(cell12, cell02.getNextCellOnDimension(1)); assertEquals(cell12, cell10.getPreviousCellOnDimension(0)); assertEquals(cell11, cell10.getNextCellOnDimension(0)); assertEquals(cell00, cell10.getPreviousCellOnDimension(1)); assertEquals(cell20, cell10.getNextCellOnDimension(1)); assertEquals(cell10, cell11.getPreviousCellOnDimension(0)); assertEquals(cell12, cell11.getNextCellOnDimension(0)); assertEquals(cell01, cell11.getPreviousCellOnDimension(1)); assertEquals(cell21, cell11.getNextCellOnDimension(1)); assertEquals(cell11, cell12.getPreviousCellOnDimension(0)); assertEquals(cell10, cell12.getNextCellOnDimension(0)); assertEquals(cell02, cell12.getPreviousCellOnDimension(1)); assertEquals(cell22, cell12.getNextCellOnDimension(1)); assertEquals(cell22, cell20.getPreviousCellOnDimension(0)); assertEquals(cell21, cell20.getNextCellOnDimension(0)); assertEquals(cell10, cell20.getPreviousCellOnDimension(1)); assertEquals(cell00, cell20.getNextCellOnDimension(1)); assertEquals(cell20, cell21.getPreviousCellOnDimension(0)); assertEquals(cell22, cell21.getNextCellOnDimension(0)); assertEquals(cell11, cell21.getPreviousCellOnDimension(1)); assertEquals(cell01, cell21.getNextCellOnDimension(1)); assertEquals(cell21, cell22.getPreviousCellOnDimension(0)); assertEquals(cell20, cell22.getNextCellOnDimension(0)); assertEquals(cell12, cell22.getPreviousCellOnDimension(1)); assertEquals(cell02, cell22.getNextCellOnDimension(1)); // check coords assertEquals(new Coords(0, 0), cell00.getCoords()); assertEquals(new Coords(1, 0), cell01.getCoords()); assertEquals(new Coords(2, 0), cell02.getCoords()); assertEquals(new Coords(0, 1), cell10.getCoords()); assertEquals(new Coords(1, 1), cell11.getCoords()); assertEquals(new Coords(2, 1), cell12.getCoords()); 
assertEquals(new Coords(0, 2), cell20.getCoords()); assertEquals(new Coords(1, 2), cell21.getCoords()); assertEquals(new Coords(2, 2), cell22.getCoords()); } @Test public void testSemiCyclicSpace2D() { // generate space IStateFactory<String> stateFactory = new AbstractStateFactory<String>() { public List<String> getPossibleStates() { return Arrays.asList(new String[] { "" }); } }; SpaceBuilder<String> builder = new SpaceBuilder<String>(); builder.setStateFactory(stateFactory).setMemorySize(1).createNewSpace() .addDimension(3, false).addDimension(3); // get cells ICell<String> cell00 = builder.getSpaceOfCell().getOrigin(); ICell<String> cell01 = cell00.getNextCellOnDimension(0); ICell<String> cell02 = cell01.getNextCellOnDimension(0); ICell<String> cell10 = cell00.getNextCellOnDimension(1); ICell<String> cell11 = cell10.getNextCellOnDimension(0); ICell<String> cell12 = cell11.getNextCellOnDimension(0); ICell<String> cell20 = cell10.getNextCellOnDimension(1); ICell<String> cell21 = cell20.getNextCellOnDimension(0); ICell<String> cell22 = cell21.getNextCellOnDimension(0); // check cells exclusivity assertSame(cell00, cell00); assertNotSame(cell00, cell01); assertNotSame(cell00, cell02); assertNotSame(cell00, cell10); assertNotSame(cell00, cell11); assertNotSame(cell00, cell12); assertNotSame(cell00, cell20); assertNotSame(cell00, cell21); assertNotSame(cell00, cell22); assertNotSame(cell01, cell00); assertSame(cell01, cell01); assertNotSame(cell01, cell02); assertNotSame(cell01, cell10); assertNotSame(cell01, cell11); assertNotSame(cell01, cell12); assertNotSame(cell01, cell20); assertNotSame(cell01, cell21); assertNotSame(cell01, cell22); assertNotSame(cell02, cell00); assertNotSame(cell02, cell01); assertSame(cell02, cell02); assertNotSame(cell02, cell10); assertNotSame(cell02, cell11); assertNotSame(cell02, cell12); assertNotSame(cell02, cell20); assertNotSame(cell02, cell21); assertNotSame(cell02, cell22); assertNotSame(cell10, cell00); assertNotSame(cell10, 
cell01); assertNotSame(cell10, cell02); assertSame(cell10, cell10); assertNotSame(cell10, cell11); assertNotSame(cell10, cell12); assertNotSame(cell10, cell20); assertNotSame(cell10, cell21); assertNotSame(cell10, cell22); assertNotSame(cell11, cell00); assertNotSame(cell11, cell01); assertNotSame(cell11, cell02); assertNotSame(cell11, cell10); assertSame(cell11, cell11); assertNotSame(cell11, cell12); assertNotSame(cell11, cell20); assertNotSame(cell11, cell21); assertNotSame(cell11, cell22); assertNotSame(cell12, cell00); assertNotSame(cell12, cell01); assertNotSame(cell12, cell02); assertNotSame(cell12, cell10); assertNotSame(cell12, cell11); assertSame(cell12, cell12); assertNotSame(cell12, cell20); assertNotSame(cell12, cell21); assertNotSame(cell12, cell22); assertNotSame(cell20, cell00); assertNotSame(cell20, cell01); assertNotSame(cell20, cell02); assertNotSame(cell20, cell10); assertNotSame(cell20, cell11); assertNotSame(cell20, cell12); assertSame(cell20, cell20); assertNotSame(cell20, cell21); assertNotSame(cell20, cell22); assertNotSame(cell21, cell00); assertNotSame(cell21, cell01); assertNotSame(cell21, cell02); assertNotSame(cell21, cell10); assertNotSame(cell21, cell11); assertNotSame(cell21, cell12); assertNotSame(cell21, cell20); assertSame(cell21, cell21); assertNotSame(cell21, cell22); assertNotSame(cell22, cell00); assertNotSame(cell22, cell01); assertNotSame(cell22, cell02); assertNotSame(cell22, cell10); assertNotSame(cell22, cell11); assertNotSame(cell22, cell12); assertNotSame(cell22, cell20); assertNotSame(cell22, cell21); assertSame(cell22, cell22); // check no intruders { List<ICell<String>> list = new ArrayList<ICell<String>>(); list.add(cell00); list.add(cell01); list.add(cell02); list.add(cell10); list.add(cell11); list.add(cell12); list.add(cell20); list.add(cell21); list.add(cell22); assertTrue(list.containsAll(cell00.getAllCellsAround())); assertTrue(list.containsAll(cell01.getAllCellsAround())); 
assertTrue(list.containsAll(cell02.getAllCellsAround())); assertTrue(list.containsAll(cell10.getAllCellsAround())); assertTrue(list.containsAll(cell11.getAllCellsAround())); assertTrue(list.containsAll(cell12.getAllCellsAround())); assertTrue(list.containsAll(cell20.getAllCellsAround())); assertTrue(list.containsAll(cell21.getAllCellsAround())); assertTrue(list.containsAll(cell22.getAllCellsAround())); } // check cells links assertEquals(null, cell00.getPreviousCellOnDimension(0)); assertEquals(cell01, cell00.getNextCellOnDimension(0)); assertEquals(cell20, cell00.getPreviousCellOnDimension(1)); assertEquals(cell10, cell00.getNextCellOnDimension(1)); assertEquals(cell00, cell01.getPreviousCellOnDimension(0)); assertEquals(cell02, cell01.getNextCellOnDimension(0)); assertEquals(cell21, cell01.getPreviousCellOnDimension(1)); assertEquals(cell11, cell01.getNextCellOnDimension(1)); assertEquals(cell01, cell02.getPreviousCellOnDimension(0)); assertEquals(null, cell02.getNextCellOnDimension(0)); assertEquals(cell22, cell02.getPreviousCellOnDimension(1)); assertEquals(cell12, cell02.getNextCellOnDimension(1)); assertEquals(null, cell10.getPreviousCellOnDimension(0)); assertEquals(cell11, cell10.getNextCellOnDimension(0)); assertEquals(cell00, cell10.getPreviousCellOnDimension(1)); assertEquals(cell20, cell10.getNextCellOnDimension(1)); assertEquals(cell10, cell11.getPreviousCellOnDimension(0)); assertEquals(cell12, cell11.getNextCellOnDimension(0)); assertEquals(cell01, cell11.getPreviousCellOnDimension(1)); assertEquals(cell21, cell11.getNextCellOnDimension(1)); assertEquals(cell11, cell12.getPreviousCellOnDimension(0)); assertEquals(null, cell12.getNextCellOnDimension(0)); assertEquals(cell02, cell12.getPreviousCellOnDimension(1)); assertEquals(cell22, cell12.getNextCellOnDimension(1)); assertEquals(null, cell20.getPreviousCellOnDimension(0)); assertEquals(cell21, cell20.getNextCellOnDimension(0)); assertEquals(cell10, cell20.getPreviousCellOnDimension(1)); 
assertEquals(cell00, cell20.getNextCellOnDimension(1)); assertEquals(cell20, cell21.getPreviousCellOnDimension(0)); assertEquals(cell22, cell21.getNextCellOnDimension(0)); assertEquals(cell11, cell21.getPreviousCellOnDimension(1)); assertEquals(cell01, cell21.getNextCellOnDimension(1)); assertEquals(cell21, cell22.getPreviousCellOnDimension(0)); assertEquals(null, cell22.getNextCellOnDimension(0)); assertEquals(cell12, cell22.getPreviousCellOnDimension(1)); assertEquals(cell02, cell22.getNextCellOnDimension(1)); // check coords assertEquals(new Coords(0, 0), cell00.getCoords()); assertEquals(new Coords(1, 0), cell01.getCoords()); assertEquals(new Coords(2, 0), cell02.getCoords()); assertEquals(new Coords(0, 1), cell10.getCoords()); assertEquals(new Coords(1, 1), cell11.getCoords()); assertEquals(new Coords(2, 1), cell12.getCoords()); assertEquals(new Coords(0, 2), cell20.getCoords()); assertEquals(new Coords(1, 2), cell21.getCoords()); assertEquals(new Coords(2, 2), cell22.getCoords()); } @Test public void testIsolatedSpace3D() { // generate space IStateFactory<String> stateFactory = new AbstractStateFactory<String>() { public List<String> getPossibleStates() { return Arrays.asList(new String[] { "" }); } }; SpaceBuilder<String> builder = new SpaceBuilder<String>(); builder.setStateFactory(stateFactory).setMemorySize(1).createNewSpace() .addDimension(3, false).addDimension(3, false) .addDimension(3, false); // get cells ICell<String> cell000 = builder.getSpaceOfCell().getOrigin(); ICell<String> cell001 = cell000.getNextCellOnDimension(0); ICell<String> cell002 = cell001.getNextCellOnDimension(0); ICell<String> cell010 = cell000.getNextCellOnDimension(1); ICell<String> cell011 = cell010.getNextCellOnDimension(0); ICell<String> cell012 = cell011.getNextCellOnDimension(0); ICell<String> cell020 = cell010.getNextCellOnDimension(1); ICell<String> cell021 = cell020.getNextCellOnDimension(0); ICell<String> cell022 = cell021.getNextCellOnDimension(0); ICell<String> cell100 
= cell000.getNextCellOnDimension(2); ICell<String> cell101 = cell100.getNextCellOnDimension(0); ICell<String> cell102 = cell101.getNextCellOnDimension(0); ICell<String> cell110 = cell100.getNextCellOnDimension(1); ICell<String> cell111 = cell110.getNextCellOnDimension(0); ICell<String> cell112 = cell111.getNextCellOnDimension(0); ICell<String> cell120 = cell110.getNextCellOnDimension(1); ICell<String> cell121 = cell120.getNextCellOnDimension(0); ICell<String> cell122 = cell121.getNextCellOnDimension(0); ICell<String> cell200 = cell100.getNextCellOnDimension(2); ICell<String> cell201 = cell200.getNextCellOnDimension(0); ICell<String> cell202 = cell201.getNextCellOnDimension(0); ICell<String> cell210 = cell200.getNextCellOnDimension(1); ICell<String> cell211 = cell210.getNextCellOnDimension(0); ICell<String> cell212 = cell211.getNextCellOnDimension(0); ICell<String> cell220 = cell210.getNextCellOnDimension(1); ICell<String> cell221 = cell220.getNextCellOnDimension(0); ICell<String> cell222 = cell221.getNextCellOnDimension(0); // check no intruders List<ICell<String>> list = new ArrayList<ICell<String>>(); list.add(cell000); list.add(cell001); list.add(cell002); list.add(cell010); list.add(cell011); list.add(cell012); list.add(cell020); list.add(cell021); list.add(cell022); list.add(cell100); list.add(cell101); list.add(cell102); list.add(cell110); list.add(cell111); list.add(cell112); list.add(cell120); list.add(cell121); list.add(cell122); list.add(cell200); list.add(cell201); list.add(cell202); list.add(cell210); list.add(cell211); list.add(cell212); list.add(cell220); list.add(cell221); list.add(cell222); assertTrue(list.containsAll(cell000.getAllCellsAround())); assertTrue(list.containsAll(cell001.getAllCellsAround())); assertTrue(list.containsAll(cell002.getAllCellsAround())); assertTrue(list.containsAll(cell010.getAllCellsAround())); assertTrue(list.containsAll(cell011.getAllCellsAround())); assertTrue(list.containsAll(cell012.getAllCellsAround())); 
assertTrue(list.containsAll(cell020.getAllCellsAround())); assertTrue(list.containsAll(cell021.getAllCellsAround())); assertTrue(list.containsAll(cell022.getAllCellsAround())); assertTrue(list.containsAll(cell100.getAllCellsAround())); assertTrue(list.containsAll(cell101.getAllCellsAround())); assertTrue(list.containsAll(cell102.getAllCellsAround())); assertTrue(list.containsAll(cell110.getAllCellsAround())); assertTrue(list.containsAll(cell111.getAllCellsAround())); assertTrue(list.containsAll(cell112.getAllCellsAround())); assertTrue(list.containsAll(cell120.getAllCellsAround())); assertTrue(list.containsAll(cell121.getAllCellsAround())); assertTrue(list.containsAll(cell122.getAllCellsAround())); assertTrue(list.containsAll(cell200.getAllCellsAround())); assertTrue(list.containsAll(cell201.getAllCellsAround())); assertTrue(list.containsAll(cell202.getAllCellsAround())); assertTrue(list.containsAll(cell210.getAllCellsAround())); assertTrue(list.containsAll(cell211.getAllCellsAround())); assertTrue(list.containsAll(cell212.getAllCellsAround())); assertTrue(list.containsAll(cell220.getAllCellsAround())); assertTrue(list.containsAll(cell221.getAllCellsAround())); assertTrue(list.containsAll(cell222.getAllCellsAround())); // check cells exclusivity List<ICell<String>> cells = new ArrayList<ICell<String>>(list); for (int i = 0; i < 27; i++) { ICell<String> expected = cells.get(i); for (int j = 0; j < 27; j++) { ICell<String> result = cells.get(j); if (i == j) { assertSame(expected, result); } else { assertNotSame(expected, result); } } } // check cells links assertEquals(null, cell000.getPreviousCellOnDimension(0)); assertEquals(cell001, cell000.getNextCellOnDimension(0)); assertEquals(null, cell000.getPreviousCellOnDimension(1)); assertEquals(cell010, cell000.getNextCellOnDimension(1)); assertEquals(null, cell000.getPreviousCellOnDimension(2)); assertEquals(cell100, cell000.getNextCellOnDimension(2)); assertEquals(cell000, cell001.getPreviousCellOnDimension(0)); 
assertEquals(cell002, cell001.getNextCellOnDimension(0)); assertEquals(null, cell001.getPreviousCellOnDimension(1)); assertEquals(cell011, cell001.getNextCellOnDimension(1)); assertEquals(null, cell001.getPreviousCellOnDimension(2)); assertEquals(cell101, cell001.getNextCellOnDimension(2)); assertEquals(cell001, cell002.getPreviousCellOnDimension(0)); assertEquals(null, cell002.getNextCellOnDimension(0)); assertEquals(null, cell002.getPreviousCellOnDimension(1)); assertEquals(cell012, cell002.getNextCellOnDimension(1)); assertEquals(null, cell002.getPreviousCellOnDimension(2)); assertEquals(cell102, cell002.getNextCellOnDimension(2)); assertEquals(null, cell010.getPreviousCellOnDimension(0)); assertEquals(cell011, cell010.getNextCellOnDimension(0)); assertEquals(cell000, cell010.getPreviousCellOnDimension(1)); assertEquals(cell020, cell010.getNextCellOnDimension(1)); assertEquals(null, cell010.getPreviousCellOnDimension(2)); assertEquals(cell110, cell010.getNextCellOnDimension(2)); assertEquals(cell010, cell011.getPreviousCellOnDimension(0)); assertEquals(cell012, cell011.getNextCellOnDimension(0)); assertEquals(cell001, cell011.getPreviousCellOnDimension(1)); assertEquals(cell021, cell011.getNextCellOnDimension(1)); assertEquals(null, cell011.getPreviousCellOnDimension(2)); assertEquals(cell111, cell011.getNextCellOnDimension(2)); assertEquals(cell011, cell012.getPreviousCellOnDimension(0)); assertEquals(null, cell012.getNextCellOnDimension(0)); assertEquals(cell002, cell012.getPreviousCellOnDimension(1)); assertEquals(cell022, cell012.getNextCellOnDimension(1)); assertEquals(null, cell012.getPreviousCellOnDimension(2)); assertEquals(cell112, cell012.getNextCellOnDimension(2)); assertEquals(null, cell020.getPreviousCellOnDimension(0)); assertEquals(cell021, cell020.getNextCellOnDimension(0)); assertEquals(cell010, cell020.getPreviousCellOnDimension(1)); assertEquals(null, cell020.getNextCellOnDimension(1)); assertEquals(null, 
cell020.getPreviousCellOnDimension(2)); assertEquals(cell120, cell020.getNextCellOnDimension(2)); assertEquals(cell020, cell021.getPreviousCellOnDimension(0)); assertEquals(cell022, cell021.getNextCellOnDimension(0)); assertEquals(cell011, cell021.getPreviousCellOnDimension(1)); assertEquals(null, cell021.getNextCellOnDimension(1)); assertEquals(null, cell021.getPreviousCellOnDimension(2)); assertEquals(cell121, cell021.getNextCellOnDimension(2)); assertEquals(cell021, cell022.getPreviousCellOnDimension(0)); assertEquals(null, cell022.getNextCellOnDimension(0)); assertEquals(cell012, cell022.getPreviousCellOnDimension(1)); assertEquals(null, cell022.getNextCellOnDimension(1)); assertEquals(null, cell022.getPreviousCellOnDimension(2)); assertEquals(cell122, cell022.getNextCellOnDimension(2)); assertEquals(null, cell100.getPreviousCellOnDimension(0)); assertEquals(cell101, cell100.getNextCellOnDimension(0)); assertEquals(null, cell100.getPreviousCellOnDimension(1)); assertEquals(cell110, cell100.getNextCellOnDimension(1)); assertEquals(cell000, cell100.getPreviousCellOnDimension(2)); assertEquals(cell200, cell100.getNextCellOnDimension(2)); assertEquals(cell100, cell101.getPreviousCellOnDimension(0)); assertEquals(cell102, cell101.getNextCellOnDimension(0)); assertEquals(null, cell101.getPreviousCellOnDimension(1)); assertEquals(cell111, cell101.getNextCellOnDimension(1)); assertEquals(cell001, cell101.getPreviousCellOnDimension(2)); assertEquals(cell201, cell101.getNextCellOnDimension(2)); assertEquals(cell101, cell102.getPreviousCellOnDimension(0)); assertEquals(null, cell102.getNextCellOnDimension(0)); assertEquals(null, cell102.getPreviousCellOnDimension(1)); assertEquals(cell112, cell102.getNextCellOnDimension(1)); assertEquals(cell002, cell102.getPreviousCellOnDimension(2)); assertEquals(cell202, cell102.getNextCellOnDimension(2)); assertEquals(null, cell110.getPreviousCellOnDimension(0)); assertEquals(cell111, cell110.getNextCellOnDimension(0)); 
assertEquals(cell100, cell110.getPreviousCellOnDimension(1)); assertEquals(cell120, cell110.getNextCellOnDimension(1)); assertEquals(cell010, cell110.getPreviousCellOnDimension(2)); assertEquals(cell210, cell110.getNextCellOnDimension(2)); assertEquals(cell110, cell111.getPreviousCellOnDimension(0)); assertEquals(cell112, cell111.getNextCellOnDimension(0)); assertEquals(cell101, cell111.getPreviousCellOnDimension(1)); assertEquals(cell121, cell111.getNextCellOnDimension(1)); assertEquals(cell011, cell111.getPreviousCellOnDimension(2)); assertEquals(cell211, cell111.getNextCellOnDimension(2)); assertEquals(cell111, cell112.getPreviousCellOnDimension(0)); assertEquals(null, cell112.getNextCellOnDimension(0)); assertEquals(cell102, cell112.getPreviousCellOnDimension(1)); assertEquals(cell122, cell112.getNextCellOnDimension(1)); assertEquals(cell012, cell112.getPreviousCellOnDimension(2)); assertEquals(cell212, cell112.getNextCellOnDimension(2)); assertEquals(null, cell120.getPreviousCellOnDimension(0)); assertEquals(cell121, cell120.getNextCellOnDimension(0)); assertEquals(cell110, cell120.getPreviousCellOnDimension(1)); assertEquals(null, cell120.getNextCellOnDimension(1)); assertEquals(cell020, cell120.getPreviousCellOnDimension(2)); assertEquals(cell220, cell120.getNextCellOnDimension(2)); assertEquals(cell120, cell121.getPreviousCellOnDimension(0)); assertEquals(cell122, cell121.getNextCellOnDimension(0)); assertEquals(cell111, cell121.getPreviousCellOnDimension(1)); assertEquals(null, cell121.getNextCellOnDimension(1)); assertEquals(cell021, cell121.getPreviousCellOnDimension(2)); assertEquals(cell221, cell121.getNextCellOnDimension(2)); assertEquals(cell121, cell122.getPreviousCellOnDimension(0)); assertEquals(null, cell122.getNextCellOnDimension(0)); assertEquals(cell112, cell122.getPreviousCellOnDimension(1)); assertEquals(null, cell122.getNextCellOnDimension(1)); assertEquals(cell022, cell122.getPreviousCellOnDimension(2)); assertEquals(cell222, 
cell122.getNextCellOnDimension(2)); assertEquals(null, cell200.getPreviousCellOnDimension(0)); assertEquals(cell201, cell200.getNextCellOnDimension(0)); assertEquals(null, cell200.getPreviousCellOnDimension(1)); assertEquals(cell210, cell200.getNextCellOnDimension(1)); assertEquals(cell100, cell200.getPreviousCellOnDimension(2)); assertEquals(null, cell200.getNextCellOnDimension(2)); assertEquals(cell200, cell201.getPreviousCellOnDimension(0)); assertEquals(cell202, cell201.getNextCellOnDimension(0)); assertEquals(null, cell201.getPreviousCellOnDimension(1)); assertEquals(cell211, cell201.getNextCellOnDimension(1)); assertEquals(cell101, cell201.getPreviousCellOnDimension(2)); assertEquals(null, cell201.getNextCellOnDimension(2)); assertEquals(cell201, cell202.getPreviousCellOnDimension(0)); assertEquals(null, cell202.getNextCellOnDimension(0)); assertEquals(null, cell202.getPreviousCellOnDimension(1)); assertEquals(cell212, cell202.getNextCellOnDimension(1)); assertEquals(cell102, cell202.getPreviousCellOnDimension(2)); assertEquals(null, cell202.getNextCellOnDimension(2)); assertEquals(null, cell210.getPreviousCellOnDimension(0)); assertEquals(cell211, cell210.getNextCellOnDimension(0)); assertEquals(cell200, cell210.getPreviousCellOnDimension(1)); assertEquals(cell220, cell210.getNextCellOnDimension(1)); assertEquals(cell110, cell210.getPreviousCellOnDimension(2)); assertEquals(null, cell210.getNextCellOnDimension(2)); assertEquals(cell210, cell211.getPreviousCellOnDimension(0)); assertEquals(cell212, cell211.getNextCellOnDimension(0)); assertEquals(cell201, cell211.getPreviousCellOnDimension(1)); assertEquals(cell221, cell211.getNextCellOnDimension(1)); assertEquals(cell111, cell211.getPreviousCellOnDimension(2)); assertEquals(null, cell211.getNextCellOnDimension(2)); assertEquals(cell211, cell212.getPreviousCellOnDimension(0)); assertEquals(null, cell212.getNextCellOnDimension(0)); assertEquals(cell202, cell212.getPreviousCellOnDimension(1)); 
assertEquals(cell222, cell212.getNextCellOnDimension(1)); assertEquals(cell112, cell212.getPreviousCellOnDimension(2)); assertEquals(null, cell212.getNextCellOnDimension(2)); assertEquals(null, cell220.getPreviousCellOnDimension(0)); assertEquals(cell221, cell220.getNextCellOnDimension(0)); assertEquals(cell210, cell220.getPreviousCellOnDimension(1)); assertEquals(null, cell220.getNextCellOnDimension(1)); assertEquals(cell120, cell220.getPreviousCellOnDimension(2)); assertEquals(null, cell220.getNextCellOnDimension(2)); assertEquals(cell220, cell221.getPreviousCellOnDimension(0)); assertEquals(cell222, cell221.getNextCellOnDimension(0)); assertEquals(cell211, cell221.getPreviousCellOnDimension(1)); assertEquals(null, cell221.getNextCellOnDimension(1)); assertEquals(cell121, cell221.getPreviousCellOnDimension(2)); assertEquals(null, cell221.getNextCellOnDimension(2)); assertEquals(cell221, cell222.getPreviousCellOnDimension(0)); assertEquals(null, cell222.getNextCellOnDimension(0)); assertEquals(cell212, cell222.getPreviousCellOnDimension(1)); assertEquals(null, cell222.getNextCellOnDimension(1)); assertEquals(cell122, cell222.getPreviousCellOnDimension(2)); assertEquals(null, cell222.getNextCellOnDimension(2)); // check coords assertEquals(new Coords(0, 0, 0), cell000.getCoords()); assertEquals(new Coords(1, 0, 0), cell001.getCoords()); assertEquals(new Coords(2, 0, 0), cell002.getCoords()); assertEquals(new Coords(0, 1, 0), cell010.getCoords()); assertEquals(new Coords(1, 1, 0), cell011.getCoords()); assertEquals(new Coords(2, 1, 0), cell012.getCoords()); assertEquals(new Coords(0, 2, 0), cell020.getCoords()); assertEquals(new Coords(1, 2, 0), cell021.getCoords()); assertEquals(new Coords(2, 2, 0), cell022.getCoords()); assertEquals(new Coords(0, 0, 1), cell100.getCoords()); assertEquals(new Coords(1, 0, 1), cell101.getCoords()); assertEquals(new Coords(2, 0, 1), cell102.getCoords()); assertEquals(new Coords(0, 1, 1), cell110.getCoords()); assertEquals(new 
Coords(1, 1, 1), cell111.getCoords()); assertEquals(new Coords(2, 1, 1), cell112.getCoords()); assertEquals(new Coords(0, 2, 1), cell120.getCoords()); assertEquals(new Coords(1, 2, 1), cell121.getCoords()); assertEquals(new Coords(2, 2, 1), cell122.getCoords()); assertEquals(new Coords(0, 0, 2), cell200.getCoords()); assertEquals(new Coords(1, 0, 2), cell201.getCoords()); assertEquals(new Coords(2, 0, 2), cell202.getCoords()); assertEquals(new Coords(0, 1, 2), cell210.getCoords()); assertEquals(new Coords(1, 1, 2), cell211.getCoords()); assertEquals(new Coords(2, 1, 2), cell212.getCoords()); assertEquals(new Coords(0, 2, 2), cell220.getCoords()); assertEquals(new Coords(1, 2, 2), cell221.getCoords()); assertEquals(new Coords(2, 2, 2), cell222.getCoords()); } @Test public void testCyclicSpace3D() { // generate space IStateFactory<String> stateFactory = new AbstractStateFactory<String>() { public List<String> getPossibleStates() { return Arrays.asList(new String[] { "" }); } }; SpaceBuilder<String> builder = new SpaceBuilder<String>(); builder.setStateFactory(stateFactory).setMemorySize(1).createNewSpace() .addDimension(3).addDimension(3).addDimension(3); // get cells ICell<String> cell000 = builder.getSpaceOfCell().getOrigin(); ICell<String> cell001 = cell000.getNextCellOnDimension(0); ICell<String> cell002 = cell001.getNextCellOnDimension(0); ICell<String> cell010 = cell000.getNextCellOnDimension(1); ICell<String> cell011 = cell010.getNextCellOnDimension(0); ICell<String> cell012 = cell011.getNextCellOnDimension(0); ICell<String> cell020 = cell010.getNextCellOnDimension(1); ICell<String> cell021 = cell020.getNextCellOnDimension(0); ICell<String> cell022 = cell021.getNextCellOnDimension(0); ICell<String> cell100 = cell000.getNextCellOnDimension(2); ICell<String> cell101 = cell100.getNextCellOnDimension(0); ICell<String> cell102 = cell101.getNextCellOnDimension(0); ICell<String> cell110 = cell100.getNextCellOnDimension(1); ICell<String> cell111 = 
cell110.getNextCellOnDimension(0); ICell<String> cell112 = cell111.getNextCellOnDimension(0); ICell<String> cell120 = cell110.getNextCellOnDimension(1); ICell<String> cell121 = cell120.getNextCellOnDimension(0); ICell<String> cell122 = cell121.getNextCellOnDimension(0); ICell<String> cell200 = cell100.getNextCellOnDimension(2); ICell<String> cell201 = cell200.getNextCellOnDimension(0); ICell<String> cell202 = cell201.getNextCellOnDimension(0); ICell<String> cell210 = cell200.getNextCellOnDimension(1); ICell<String> cell211 = cell210.getNextCellOnDimension(0); ICell<String> cell212 = cell211.getNextCellOnDimension(0); ICell<String> cell220 = cell210.getNextCellOnDimension(1); ICell<String> cell221 = cell220.getNextCellOnDimension(0); ICell<String> cell222 = cell221.getNextCellOnDimension(0); // check no intruders List<ICell<String>> list = new ArrayList<ICell<String>>(); list.add(cell000); list.add(cell001); list.add(cell002); list.add(cell010); list.add(cell011); list.add(cell012); list.add(cell020); list.add(cell021); list.add(cell022); list.add(cell100); list.add(cell101); list.add(cell102); list.add(cell110); list.add(cell111); list.add(cell112); list.add(cell120); list.add(cell121); list.add(cell122); list.add(cell200); list.add(cell201); list.add(cell202); list.add(cell210); list.add(cell211); list.add(cell212); list.add(cell220); list.add(cell221); list.add(cell222); assertTrue(list.containsAll(cell000.getAllCellsAround())); assertTrue(list.containsAll(cell001.getAllCellsAround())); assertTrue(list.containsAll(cell002.getAllCellsAround())); assertTrue(list.containsAll(cell010.getAllCellsAround())); assertTrue(list.containsAll(cell011.getAllCellsAround())); assertTrue(list.containsAll(cell012.getAllCellsAround())); assertTrue(list.containsAll(cell020.getAllCellsAround())); assertTrue(list.containsAll(cell021.getAllCellsAround())); assertTrue(list.containsAll(cell022.getAllCellsAround())); assertTrue(list.containsAll(cell100.getAllCellsAround())); 
assertTrue(list.containsAll(cell101.getAllCellsAround())); assertTrue(list.containsAll(cell102.getAllCellsAround())); assertTrue(list.containsAll(cell110.getAllCellsAround())); assertTrue(list.containsAll(cell111.getAllCellsAround())); assertTrue(list.containsAll(cell112.getAllCellsAround())); assertTrue(list.containsAll(cell120.getAllCellsAround())); assertTrue(list.containsAll(cell121.getAllCellsAround())); assertTrue(list.containsAll(cell122.getAllCellsAround())); assertTrue(list.containsAll(cell200.getAllCellsAround())); assertTrue(list.containsAll(cell201.getAllCellsAround())); assertTrue(list.containsAll(cell202.getAllCellsAround())); assertTrue(list.containsAll(cell210.getAllCellsAround())); assertTrue(list.containsAll(cell211.getAllCellsAround())); assertTrue(list.containsAll(cell212.getAllCellsAround())); assertTrue(list.containsAll(cell220.getAllCellsAround())); assertTrue(list.containsAll(cell221.getAllCellsAround())); assertTrue(list.containsAll(cell222.getAllCellsAround())); // check cells exclusivity List<ICell<String>> cells = new ArrayList<ICell<String>>(list); for (int i = 0; i < 27; i++) { ICell<String> expected = cells.get(i); for (int j = 0; j < 27; j++) { ICell<String> result = cells.get(j); if (i == j) { assertSame(expected, result); } else { assertNotSame(expected, result); } } } // check cells links assertEquals(cell002, cell000.getPreviousCellOnDimension(0)); assertEquals(cell001, cell000.getNextCellOnDimension(0)); assertEquals(cell020, cell000.getPreviousCellOnDimension(1)); assertEquals(cell010, cell000.getNextCellOnDimension(1)); assertEquals(cell200, cell000.getPreviousCellOnDimension(2)); assertEquals(cell100, cell000.getNextCellOnDimension(2)); assertEquals(cell000, cell001.getPreviousCellOnDimension(0)); assertEquals(cell002, cell001.getNextCellOnDimension(0)); assertEquals(cell021, cell001.getPreviousCellOnDimension(1)); assertEquals(cell011, cell001.getNextCellOnDimension(1)); assertEquals(cell201, 
cell001.getPreviousCellOnDimension(2)); assertEquals(cell101, cell001.getNextCellOnDimension(2)); assertEquals(cell001, cell002.getPreviousCellOnDimension(0)); assertEquals(cell000, cell002.getNextCellOnDimension(0)); assertEquals(cell022, cell002.getPreviousCellOnDimension(1)); assertEquals(cell012, cell002.getNextCellOnDimension(1)); assertEquals(cell202, cell002.getPreviousCellOnDimension(2)); assertEquals(cell102, cell002.getNextCellOnDimension(2)); assertEquals(cell012, cell010.getPreviousCellOnDimension(0)); assertEquals(cell011, cell010.getNextCellOnDimension(0)); assertEquals(cell000, cell010.getPreviousCellOnDimension(1)); assertEquals(cell020, cell010.getNextCellOnDimension(1)); assertEquals(cell210, cell010.getPreviousCellOnDimension(2)); assertEquals(cell110, cell010.getNextCellOnDimension(2)); assertEquals(cell010, cell011.getPreviousCellOnDimension(0)); assertEquals(cell012, cell011.getNextCellOnDimension(0)); assertEquals(cell001, cell011.getPreviousCellOnDimension(1)); assertEquals(cell021, cell011.getNextCellOnDimension(1)); assertEquals(cell211, cell011.getPreviousCellOnDimension(2)); assertEquals(cell111, cell011.getNextCellOnDimension(2)); assertEquals(cell011, cell012.getPreviousCellOnDimension(0)); assertEquals(cell010, cell012.getNextCellOnDimension(0)); assertEquals(cell002, cell012.getPreviousCellOnDimension(1)); assertEquals(cell022, cell012.getNextCellOnDimension(1)); assertEquals(cell212, cell012.getPreviousCellOnDimension(2)); assertEquals(cell112, cell012.getNextCellOnDimension(2)); assertEquals(cell022, cell020.getPreviousCellOnDimension(0)); assertEquals(cell021, cell020.getNextCellOnDimension(0)); assertEquals(cell010, cell020.getPreviousCellOnDimension(1)); assertEquals(cell000, cell020.getNextCellOnDimension(1)); assertEquals(cell220, cell020.getPreviousCellOnDimension(2)); assertEquals(cell120, cell020.getNextCellOnDimension(2)); assertEquals(cell020, cell021.getPreviousCellOnDimension(0)); assertEquals(cell022, 
cell021.getNextCellOnDimension(0)); assertEquals(cell011, cell021.getPreviousCellOnDimension(1)); assertEquals(cell001, cell021.getNextCellOnDimension(1)); assertEquals(cell221, cell021.getPreviousCellOnDimension(2)); assertEquals(cell121, cell021.getNextCellOnDimension(2)); assertEquals(cell021, cell022.getPreviousCellOnDimension(0)); assertEquals(cell020, cell022.getNextCellOnDimension(0)); assertEquals(cell012, cell022.getPreviousCellOnDimension(1)); assertEquals(cell002, cell022.getNextCellOnDimension(1)); assertEquals(cell222, cell022.getPreviousCellOnDimension(2)); assertEquals(cell122, cell022.getNextCellOnDimension(2)); assertEquals(cell102, cell100.getPreviousCellOnDimension(0)); assertEquals(cell101, cell100.getNextCellOnDimension(0)); assertEquals(cell120, cell100.getPreviousCellOnDimension(1)); assertEquals(cell110, cell100.getNextCellOnDimension(1)); assertEquals(cell000, cell100.getPreviousCellOnDimension(2)); assertEquals(cell200, cell100.getNextCellOnDimension(2)); assertEquals(cell100, cell101.getPreviousCellOnDimension(0)); assertEquals(cell102, cell101.getNextCellOnDimension(0)); assertEquals(cell121, cell101.getPreviousCellOnDimension(1)); assertEquals(cell111, cell101.getNextCellOnDimension(1)); assertEquals(cell001, cell101.getPreviousCellOnDimension(2)); assertEquals(cell201, cell101.getNextCellOnDimension(2)); assertEquals(cell101, cell102.getPreviousCellOnDimension(0)); assertEquals(cell100, cell102.getNextCellOnDimension(0)); assertEquals(cell122, cell102.getPreviousCellOnDimension(1)); assertEquals(cell112, cell102.getNextCellOnDimension(1)); assertEquals(cell002, cell102.getPreviousCellOnDimension(2)); assertEquals(cell202, cell102.getNextCellOnDimension(2)); assertEquals(cell112, cell110.getPreviousCellOnDimension(0)); assertEquals(cell111, cell110.getNextCellOnDimension(0)); assertEquals(cell100, cell110.getPreviousCellOnDimension(1)); assertEquals(cell120, cell110.getNextCellOnDimension(1)); assertEquals(cell010, 
cell110.getPreviousCellOnDimension(2)); assertEquals(cell210, cell110.getNextCellOnDimension(2)); assertEquals(cell110, cell111.getPreviousCellOnDimension(0)); assertEquals(cell112, cell111.getNextCellOnDimension(0)); assertEquals(cell101, cell111.getPreviousCellOnDimension(1)); assertEquals(cell121, cell111.getNextCellOnDimension(1)); assertEquals(cell011, cell111.getPreviousCellOnDimension(2)); assertEquals(cell211, cell111.getNextCellOnDimension(2)); assertEquals(cell111, cell112.getPreviousCellOnDimension(0)); assertEquals(cell110, cell112.getNextCellOnDimension(0)); assertEquals(cell102, cell112.getPreviousCellOnDimension(1)); assertEquals(cell122, cell112.getNextCellOnDimension(1)); assertEquals(cell012, cell112.getPreviousCellOnDimension(2)); assertEquals(cell212, cell112.getNextCellOnDimension(2)); assertEquals(cell122, cell120.getPreviousCellOnDimension(0)); assertEquals(cell121, cell120.getNextCellOnDimension(0)); assertEquals(cell110, cell120.getPreviousCellOnDimension(1)); assertEquals(cell100, cell120.getNextCellOnDimension(1)); assertEquals(cell020, cell120.getPreviousCellOnDimension(2)); assertEquals(cell220, cell120.getNextCellOnDimension(2)); assertEquals(cell120, cell121.getPreviousCellOnDimension(0)); assertEquals(cell122, cell121.getNextCellOnDimension(0)); assertEquals(cell111, cell121.getPreviousCellOnDimension(1)); assertEquals(cell101, cell121.getNextCellOnDimension(1)); assertEquals(cell021, cell121.getPreviousCellOnDimension(2)); assertEquals(cell221, cell121.getNextCellOnDimension(2)); assertEquals(cell121, cell122.getPreviousCellOnDimension(0)); assertEquals(cell120, cell122.getNextCellOnDimension(0)); assertEquals(cell112, cell122.getPreviousCellOnDimension(1)); assertEquals(cell102, cell122.getNextCellOnDimension(1)); assertEquals(cell022, cell122.getPreviousCellOnDimension(2)); assertEquals(cell222, cell122.getNextCellOnDimension(2)); assertEquals(cell202, cell200.getPreviousCellOnDimension(0)); assertEquals(cell201, 
cell200.getNextCellOnDimension(0)); assertEquals(cell220, cell200.getPreviousCellOnDimension(1)); assertEquals(cell210, cell200.getNextCellOnDimension(1)); assertEquals(cell100, cell200.getPreviousCellOnDimension(2)); assertEquals(cell000, cell200.getNextCellOnDimension(2)); assertEquals(cell200, cell201.getPreviousCellOnDimension(0)); assertEquals(cell202, cell201.getNextCellOnDimension(0)); assertEquals(cell221, cell201.getPreviousCellOnDimension(1)); assertEquals(cell211, cell201.getNextCellOnDimension(1)); assertEquals(cell101, cell201.getPreviousCellOnDimension(2)); assertEquals(cell001, cell201.getNextCellOnDimension(2)); assertEquals(cell201, cell202.getPreviousCellOnDimension(0)); assertEquals(cell200, cell202.getNextCellOnDimension(0)); assertEquals(cell222, cell202.getPreviousCellOnDimension(1)); assertEquals(cell212, cell202.getNextCellOnDimension(1)); assertEquals(cell102, cell202.getPreviousCellOnDimension(2)); assertEquals(cell002, cell202.getNextCellOnDimension(2)); assertEquals(cell212, cell210.getPreviousCellOnDimension(0)); assertEquals(cell211, cell210.getNextCellOnDimension(0)); assertEquals(cell200, cell210.getPreviousCellOnDimension(1)); assertEquals(cell220, cell210.getNextCellOnDimension(1)); assertEquals(cell110, cell210.getPreviousCellOnDimension(2)); assertEquals(cell010, cell210.getNextCellOnDimension(2)); assertEquals(cell210, cell211.getPreviousCellOnDimension(0)); assertEquals(cell212, cell211.getNextCellOnDimension(0)); assertEquals(cell201, cell211.getPreviousCellOnDimension(1)); assertEquals(cell221, cell211.getNextCellOnDimension(1)); assertEquals(cell111, cell211.getPreviousCellOnDimension(2)); assertEquals(cell011, cell211.getNextCellOnDimension(2)); assertEquals(cell211, cell212.getPreviousCellOnDimension(0)); assertEquals(cell210, cell212.getNextCellOnDimension(0)); assertEquals(cell202, cell212.getPreviousCellOnDimension(1)); assertEquals(cell222, cell212.getNextCellOnDimension(1)); assertEquals(cell112, 
cell212.getPreviousCellOnDimension(2)); assertEquals(cell012, cell212.getNextCellOnDimension(2)); assertEquals(cell222, cell220.getPreviousCellOnDimension(0)); assertEquals(cell221, cell220.getNextCellOnDimension(0)); assertEquals(cell210, cell220.getPreviousCellOnDimension(1)); assertEquals(cell200, cell220.getNextCellOnDimension(1)); assertEquals(cell120, cell220.getPreviousCellOnDimension(2)); assertEquals(cell020, cell220.getNextCellOnDimension(2)); assertEquals(cell220, cell221.getPreviousCellOnDimension(0)); assertEquals(cell222, cell221.getNextCellOnDimension(0)); assertEquals(cell211, cell221.getPreviousCellOnDimension(1)); assertEquals(cell201, cell221.getNextCellOnDimension(1)); assertEquals(cell121, cell221.getPreviousCellOnDimension(2)); assertEquals(cell021, cell221.getNextCellOnDimension(2)); assertEquals(cell221, cell222.getPreviousCellOnDimension(0)); assertEquals(cell220, cell222.getNextCellOnDimension(0)); assertEquals(cell212, cell222.getPreviousCellOnDimension(1)); assertEquals(cell202, cell222.getNextCellOnDimension(1)); assertEquals(cell122, cell222.getPreviousCellOnDimension(2)); assertEquals(cell022, cell222.getNextCellOnDimension(2)); // check coords assertEquals(new Coords(0, 0, 0), cell000.getCoords()); assertEquals(new Coords(1, 0, 0), cell001.getCoords()); assertEquals(new Coords(2, 0, 0), cell002.getCoords()); assertEquals(new Coords(0, 1, 0), cell010.getCoords()); assertEquals(new Coords(1, 1, 0), cell011.getCoords()); assertEquals(new Coords(2, 1, 0), cell012.getCoords()); assertEquals(new Coords(0, 2, 0), cell020.getCoords()); assertEquals(new Coords(1, 2, 0), cell021.getCoords()); assertEquals(new Coords(2, 2, 0), cell022.getCoords()); assertEquals(new Coords(0, 0, 1), cell100.getCoords()); assertEquals(new Coords(1, 0, 1), cell101.getCoords()); assertEquals(new Coords(2, 0, 1), cell102.getCoords()); assertEquals(new Coords(0, 1, 1), cell110.getCoords()); assertEquals(new Coords(1, 1, 1), cell111.getCoords()); assertEquals(new 
Coords(2, 1, 1), cell112.getCoords()); assertEquals(new Coords(0, 2, 1), cell120.getCoords()); assertEquals(new Coords(1, 2, 1), cell121.getCoords()); assertEquals(new Coords(2, 2, 1), cell122.getCoords()); assertEquals(new Coords(0, 0, 2), cell200.getCoords()); assertEquals(new Coords(1, 0, 2), cell201.getCoords()); assertEquals(new Coords(2, 0, 2), cell202.getCoords()); assertEquals(new Coords(0, 1, 2), cell210.getCoords()); assertEquals(new Coords(1, 1, 2), cell211.getCoords()); assertEquals(new Coords(2, 1, 2), cell212.getCoords()); assertEquals(new Coords(0, 2, 2), cell220.getCoords()); assertEquals(new Coords(1, 2, 2), cell221.getCoords()); assertEquals(new Coords(2, 2, 2), cell222.getCoords()); } @Test public void testSpaceStates() { // generate space IStateFactory<String> stateFactory = new AbstractStateFactory<String>() { public List<String> getPossibleStates() { return Arrays.asList(new String[] { "0", "1", "2" }); } @Override public void customize(ICell<String> cell) { cell.setCurrentState("" + cell.getCoords().get(0)); } }; SpaceBuilder<String> builder = new SpaceBuilder<String>(); builder.setStateFactory(stateFactory).createNewSpace().addDimension(3); // get cells ICell<String> cell0 = builder.getSpaceOfCell().getOrigin(); ICell<String> cell1 = cell0.getNextCellOnDimension(0); ICell<String> cell2 = cell1.getNextCellOnDimension(0); // check cells state assertEquals("0", cell0.getCurrentState()); assertEquals("1", cell1.getCurrentState()); assertEquals("2", cell2.getCurrentState()); } @Test public void testCoordsMutability() { // generate space IStateFactory<String> stateFactory = new AbstractStateFactory<String>() { public List<String> getPossibleStates() { return Arrays.asList(new String[] { "0", "1", "2" }); } @Override public void customize(ICell<String> cell) { cell.setCurrentState("" + cell.getCoords().get(0)); } }; SpaceBuilder<String> builder = new SpaceBuilder<String>(); builder.setStateFactory(stateFactory).createNewSpace().addDimension(3); // 
get cells ICell<String> cell0 = builder.getSpaceOfCell().getOrigin(); ICell<String> cell1 = cell0.getNextCellOnDimension(0); ICell<String> cell2 = cell1.getNextCellOnDimension(0); // check coords mutability assertFalse(cell0.getCoords().isMutable()); assertFalse(cell1.getCoords().isMutable()); assertFalse(cell2.getCoords().isMutable()); } @Test public void testFinalisation() { // generate space IStateFactory<String> stateFactory = new AbstractStateFactory<String>() { public List<String> getPossibleStates() { return Arrays.asList(new String[] { "" }); } }; SpaceBuilder<String> builder = new SpaceBuilder<String>(); assertFalse(builder.isSpaceFinalized()); builder.setStateFactory(stateFactory); assertFalse(builder.isSpaceFinalized()); builder.setMemorySize(1); assertFalse(builder.isSpaceFinalized()); builder.createNewSpace(); assertFalse(builder.isSpaceFinalized()); builder.addDimension(3); assertFalse(builder.isSpaceFinalized()); builder.finalizeSpace(); assertTrue(builder.isSpaceFinalized()); builder.getSpaceOfCell(); assertTrue(builder.isSpaceFinalized()); } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.action.admin.cluster.settings;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.settings.ClusterDynamicSettings;
import org.elasticsearch.cluster.settings.DynamicSettings;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

import java.util.Map;

import static org.elasticsearch.cluster.ClusterState.builder;

/**
 * Master-node transport action behind the cluster update-settings API.
 *
 * <p>The update is performed as two chained cluster state update tasks: the first validates
 * the requested transient/persistent settings against the registered dynamic settings and
 * applies the accepted ones; once that task is acked (or times out), a second task performs a
 * reroute so that allocation-related setting changes actually take effect. The two-step
 * split is required because components (e.g. allocation deciders) only observe the new
 * settings after the first task's state has been published.
 */
public class TransportClusterUpdateSettingsAction extends TransportMasterNodeAction<ClusterUpdateSettingsRequest, ClusterUpdateSettingsResponse> {

    private final AllocationService allocationService;

    private final DynamicSettings dynamicSettings;

    @Inject
    public TransportClusterUpdateSettingsAction(Settings settings, TransportService transportService, ClusterService clusterService,
                                                ThreadPool threadPool, AllocationService allocationService,
                                                @ClusterDynamicSettings DynamicSettings dynamicSettings,
                                                ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
        super(settings, ClusterUpdateSettingsAction.NAME, transportService, clusterService, threadPool, actionFilters,
                indexNameExpressionResolver, ClusterUpdateSettingsRequest::new);
        this.allocationService = allocationService;
        this.dynamicSettings = dynamicSettings;
    }

    @Override
    protected String executor() {
        // All real work happens inside cluster state update tasks; no dedicated thread pool needed.
        return ThreadPool.Names.SAME;
    }

    @Override
    protected ClusterBlockException checkBlock(ClusterUpdateSettingsRequest request, ClusterState state) {
        // Allow dedicated changes to the metadata blocks, so we don't block a request whose sole
        // purpose is to flip cluster.blocks.read_only back off ("re-enable" the cluster).
        if ((request.transientSettings().getAsMap().isEmpty()
                && request.persistentSettings().getAsMap().size() == 1
                && request.persistentSettings().get(MetaData.SETTING_READ_ONLY) != null)
            || (request.persistentSettings().getAsMap().isEmpty()
                && request.transientSettings().getAsMap().size() == 1
                && request.transientSettings().get(MetaData.SETTING_READ_ONLY) != null)) {
            return null;
        }
        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
    }

    @Override
    protected ClusterUpdateSettingsResponse newResponse() {
        return new ClusterUpdateSettingsResponse();
    }

    @Override
    protected void masterOperation(final ClusterUpdateSettingsRequest request, final ClusterState state,
                                   final ActionListener<ClusterUpdateSettingsResponse> listener) {
        // Collect the subset of requested settings that pass validation; these are echoed back in the response.
        final Settings.Builder transientUpdates = Settings.settingsBuilder();
        final Settings.Builder persistentUpdates = Settings.settingsBuilder();

        clusterService.submitStateUpdateTask("cluster_update_settings",
                new AckedClusterStateUpdateTask<ClusterUpdateSettingsResponse>(Priority.IMMEDIATE, request, listener) {

            // Set by execute() when at least one setting was accepted; read from ack callbacks,
            // hence volatile.
            private volatile boolean changed = false;

            @Override
            protected ClusterUpdateSettingsResponse newResponse(boolean acknowledged) {
                return new ClusterUpdateSettingsResponse(acknowledged, transientUpdates.build(), persistentUpdates.build());
            }

            @Override
            public void onAllNodesAcked(@Nullable Throwable t) {
                if (changed) {
                    reroute(true);
                } else {
                    super.onAllNodesAcked(t);
                }
            }

            @Override
            public void onAckTimeout() {
                if (changed) {
                    // Still reroute, but report the settings update as not acknowledged.
                    reroute(false);
                } else {
                    super.onAckTimeout();
                }
            }

            /**
             * Submits the follow-up reroute task. {@code updateSettingsAcked} records whether the
             * settings update itself was acknowledged; the final response acknowledgement is the
             * conjunction of that flag and the reroute acknowledgement.
             */
            private void reroute(final boolean updateSettingsAcked) {
                // We're about to send a second update task, so we need to check if we're still the elected master.
                // For example the minimum_master_node could have been breached and we're no longer elected master,
                // so we should *not* execute the reroute.
                if (!clusterService.state().nodes().localNodeMaster()) {
                    logger.debug("Skipping reroute after cluster update settings, because node is no longer master");
                    listener.onResponse(new ClusterUpdateSettingsResponse(updateSettingsAcked, transientUpdates.build(), persistentUpdates.build()));
                    return;
                }

                // The reason the reroute needs to be sent as a separate update task, is that all the *cluster* settings
                // are encapsulated in the components (e.g. FilterAllocationDecider), so the changes made by the first
                // call aren't visible to the components until the ClusterStateListener instances have been invoked, but
                // are visible after the first update task has been completed.
                clusterService.submitStateUpdateTask("reroute_after_cluster_update_settings",
                        new AckedClusterStateUpdateTask<ClusterUpdateSettingsResponse>(Priority.URGENT, request, listener) {

                    @Override
                    public boolean mustAck(DiscoveryNode discoveryNode) {
                        // we wait for the reroute ack only if the update settings was acknowledged
                        return updateSettingsAcked;
                    }

                    @Override
                    // we return when the cluster reroute is acked or it times out but the acknowledged flag
                    // depends on whether the update settings was acknowledged
                    protected ClusterUpdateSettingsResponse newResponse(boolean acknowledged) {
                        return new ClusterUpdateSettingsResponse(updateSettingsAcked && acknowledged, transientUpdates.build(), persistentUpdates.build());
                    }

                    @Override
                    public void onNoLongerMaster(String source) {
                        // Fixed typo in the original message ("preform" -> "perform").
                        logger.debug("failed to perform reroute after cluster settings were updated - current node is no longer a master");
                        listener.onResponse(new ClusterUpdateSettingsResponse(updateSettingsAcked, transientUpdates.build(), persistentUpdates.build()));
                    }

                    @Override
                    public void onFailure(String source, Throwable t) {
                        // if the reroute fails we only log; the settings themselves were already applied
                        logger.debug("failed to perform [{}]", t, source);
                        listener.onFailure(new ElasticsearchException("reroute after update settings failed", t));
                    }

                    @Override
                    public ClusterState execute(final ClusterState currentState) {
                        // now, reroute in case things that require it changed (e.g. number of replicas)
                        RoutingAllocation.Result routingResult = allocationService.reroute(currentState, "reroute after cluster update settings");
                        if (!routingResult.changed()) {
                            return currentState;
                        }
                        return ClusterState.builder(currentState).routingResult(routingResult).build();
                    }
                });
            }

            @Override
            public void onFailure(String source, Throwable t) {
                logger.debug("failed to perform [{}]", t, source);
                super.onFailure(source, t);
            }

            @Override
            public ClusterState execute(final ClusterState currentState) {
                // Validate and merge the requested transient settings on top of the current ones.
                Settings.Builder transientSettings = Settings.settingsBuilder();
                transientSettings.put(currentState.metaData().transientSettings());
                for (Map.Entry<String, String> entry : request.transientSettings().getAsMap().entrySet()) {
                    if (dynamicSettings.isDynamicOrLoggingSetting(entry.getKey())) {
                        String error = dynamicSettings.validateDynamicSetting(entry.getKey(), entry.getValue(), clusterService.state());
                        if (error == null) {
                            transientSettings.put(entry.getKey(), entry.getValue());
                            transientUpdates.put(entry.getKey(), entry.getValue());
                            changed = true;
                        } else {
                            logger.warn("ignoring transient setting [{}], [{}]", entry.getKey(), error);
                        }
                    } else {
                        logger.warn("ignoring transient setting [{}], not dynamically updateable", entry.getKey());
                    }
                }

                // Same validation/merge for the persistent settings.
                Settings.Builder persistentSettings = Settings.settingsBuilder();
                persistentSettings.put(currentState.metaData().persistentSettings());
                for (Map.Entry<String, String> entry : request.persistentSettings().getAsMap().entrySet()) {
                    if (dynamicSettings.isDynamicOrLoggingSetting(entry.getKey())) {
                        String error = dynamicSettings.validateDynamicSetting(entry.getKey(), entry.getValue(), clusterService.state());
                        if (error == null) {
                            persistentSettings.put(entry.getKey(), entry.getValue());
                            persistentUpdates.put(entry.getKey(), entry.getValue());
                            changed = true;
                        } else {
                            logger.warn("ignoring persistent setting [{}], [{}]", entry.getKey(), error);
                        }
                    } else {
                        logger.warn("ignoring persistent setting [{}], not dynamically updateable", entry.getKey());
                    }
                }

                if (!changed) {
                    return currentState;
                }

                MetaData.Builder metaData = MetaData.builder(currentState.metaData())
                        .persistentSettings(persistentSettings.build())
                        .transientSettings(transientSettings.build());

                // Keep the global read-only block in sync with the (possibly updated) setting.
                ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks());
                boolean updatedReadOnly = metaData.persistentSettings().getAsBoolean(MetaData.SETTING_READ_ONLY, false)
                        || metaData.transientSettings().getAsBoolean(MetaData.SETTING_READ_ONLY, false);
                if (updatedReadOnly) {
                    blocks.addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK);
                } else {
                    blocks.removeGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK);
                }

                return builder(currentState).metaData(metaData).blocks(blocks).build();
            }
        });
    }
}
// Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe;

import com.google.common.base.MoreObjects;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.devtools.build.lib.analysis.TargetAndConfiguration;
import com.google.devtools.build.lib.analysis.config.BuildConfigurationCollection;
import com.google.devtools.build.lib.analysis.config.BuildConfigurationValue;
import com.google.devtools.build.lib.analysis.config.BuildOptions;
import com.google.devtools.build.lib.analysis.config.ConfigurationResolver.TopLevelTargetsAndConfigsResult;
import com.google.devtools.build.lib.analysis.config.FragmentClassSet;
import com.google.devtools.build.lib.analysis.config.InvalidConfigurationException;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadSafe;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.ExtendedEventHandler;
import com.google.devtools.build.lib.packages.NoSuchPackageException;
import com.google.devtools.build.lib.packages.NoSuchTargetException;
import com.google.devtools.build.lib.packages.Target;
import com.google.devtools.build.lib.skyframe.serialization.autocodec.AutoCodec;
import com.google.devtools.build.lib.skyframe.serialization.autocodec.AutoCodec.VisibleForSerialization;
import com.google.devtools.build.skyframe.SkyFunctionName;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * A value referring to a set of build configuration keys in order to reconstruct the
 * legacy {@link BuildConfigurationCollection} as well as a set of top level configured target keys
 * that are subsequently requested to trigger the analysis phase.
 *
 * <p>The public interface returns {@link BuildConfigurationCollection} and {@link
 * TargetAndConfiguration} even though these are not internally stored - the construction of these
 * objects requires additional Skyframe calls. The intention is that these are temporary until a
 * larger fraction of the code has been ported to Skyframe, at which point we'll use the internal
 * representation.
 */
@Immutable
@ThreadSafe
@AutoCodec
public final class PrepareAnalysisPhaseValue implements SkyValue {
  /** Key of the host configuration used by this build. */
  private final BuildConfigurationKey hostConfigurationKey;
  /** Keys of the top-level target configurations, in request order. */
  private final ImmutableList<BuildConfigurationKey> targetConfigurationKeys;
  /** Keys of the top-level configured targets to be analyzed. */
  private final ImmutableList<ConfiguredTargetKey> topLevelCtKeys;

  PrepareAnalysisPhaseValue(
      BuildConfigurationKey hostConfigurationKey,
      ImmutableList<BuildConfigurationKey> targetConfigurationKeys,
      ImmutableList<ConfiguredTargetKey> topLevelCtKeys) {
    this.hostConfigurationKey = Preconditions.checkNotNull(hostConfigurationKey);
    this.targetConfigurationKeys = Preconditions.checkNotNull(targetConfigurationKeys);
    this.topLevelCtKeys = Preconditions.checkNotNull(topLevelCtKeys);
  }

  /**
   * Returns the legacy {@link BuildConfigurationCollection}. Note that this performs additional
   * Skyframe calls, which may be expensive.
   */
  public BuildConfigurationCollection getConfigurations(
      ExtendedEventHandler eventHandler, SkyframeExecutor skyframeExecutor)
      throws InvalidConfigurationException {
    BuildConfigurationValue hostConfiguration =
        skyframeExecutor.getConfiguration(eventHandler, hostConfigurationKey);
    ImmutableList<BuildConfigurationValue> targetConfigurations =
        ImmutableList.copyOf(
            skyframeExecutor.getConfigurations(eventHandler, targetConfigurationKeys).values());
    return new BuildConfigurationCollection(targetConfigurations, hostConfiguration);
  }

  /**
   * Returns the intended top-level targets and configurations for the build. Note that this
   * performs additional Skyframe calls for the involved configurations and targets, which may be
   * expensive.
   *
   * <p>Skips targets that have errors and registers the errors to be reported later as part of
   * {@link com.google.devtools.build.lib.analysis.AnalysisResult} error resolution.
   */
  public TopLevelTargetsAndConfigsResult getTopLevelCts(
      ExtendedEventHandler eventHandler, SkyframeExecutor skyframeExecutor) {
    List<TargetAndConfiguration> result = new ArrayList<>();
    // Bulk-fetch all distinct configurations referenced by the top-level keys; a null
    // configuration key (e.g. for input files) is simply filtered out here and handled below.
    Map<BuildConfigurationKey, BuildConfigurationValue> configs =
        skyframeExecutor.getConfigurations(
            eventHandler,
            topLevelCtKeys.stream()
                .map(ConfiguredTargetKey::getConfigurationKey)
                .filter(Predicates.notNull())
                .collect(Collectors.toSet()));

    // TODO(ulfjack): This performs one Skyframe call per top-level target. This is not a
    // regression, but we should fix it nevertheless, either by doing a bulk lookup call or by
    // migrating the consumers of these to Skyframe so they can directly request the values.
    boolean hasError = false;
    for (ConfiguredTargetKey key : topLevelCtKeys) {
      Target target;
      try {
        target = skyframeExecutor.getPackageManager().getTarget(eventHandler, key.getLabel());
      } catch (NoSuchPackageException | NoSuchTargetException | InterruptedException e) {
        if (e instanceof InterruptedException) {
          // The original code swallowed the interrupt; restore the flag so callers further up
          // the stack can still observe the cancellation.
          Thread.currentThread().interrupt();
        }
        eventHandler.handle(
            Event.error("Failed to get package from TargetPatternPhaseValue: " + e.getMessage()));
        hasError = true;
        continue;
      }
      BuildConfigurationValue config =
          key.getConfigurationKey() == null ? null : configs.get(key.getConfigurationKey());
      result.add(new TargetAndConfiguration(target, config));
    }
    return new TopLevelTargetsAndConfigsResult(result, hasError);
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (!(obj instanceof PrepareAnalysisPhaseValue)) {
      return false;
    }
    PrepareAnalysisPhaseValue that = (PrepareAnalysisPhaseValue) obj;
    return this.hostConfigurationKey.equals(that.hostConfigurationKey)
        && this.targetConfigurationKeys.equals(that.targetConfigurationKeys)
        && this.topLevelCtKeys.equals(that.topLevelCtKeys);
  }

  @Override
  public int hashCode() {
    return Objects.hash(
        this.hostConfigurationKey, this.targetConfigurationKeys, this.topLevelCtKeys);
  }

  /** Create a prepare analysis phase key. */
  @ThreadSafe
  public static SkyKey key(
      FragmentClassSet fragments,
      BuildOptions options,
      Set<String> multiCpu,
      Collection<Label> labels) {
    return new PrepareAnalysisPhaseKey(fragments, options, multiCpu, labels);
  }

  /** The configuration needed to prepare the analysis phase. */
  @ThreadSafe
  @VisibleForSerialization
  @AutoCodec
  public static final class PrepareAnalysisPhaseKey implements SkyKey, Serializable {
    private final FragmentClassSet fragments;
    private final BuildOptions options;
    // Defensively copied to immutable collections so the key is safe to share and hash.
    private final ImmutableSortedSet<String> multiCpu;
    private final ImmutableSet<Label> labels;

    PrepareAnalysisPhaseKey(
        FragmentClassSet fragments,
        BuildOptions options,
        Set<String> multiCpu,
        Collection<Label> labels) {
      this.fragments = Preconditions.checkNotNull(fragments);
      this.options = Preconditions.checkNotNull(options);
      this.multiCpu = ImmutableSortedSet.copyOf(multiCpu);
      this.labels = ImmutableSet.copyOf(labels);
    }

    @Override
    public SkyFunctionName functionName() {
      return SkyFunctions.PREPARE_ANALYSIS_PHASE;
    }

    public FragmentClassSet getFragments() {
      return fragments;
    }

    public BuildOptions getOptions() {
      return options;
    }

    public ImmutableSortedSet<String> getMultiCpu() {
      return multiCpu;
    }

    public ImmutableSet<Label> getLabels() {
      return labels;
    }

    @Override
    public String toString() {
      return MoreObjects.toStringHelper(PrepareAnalysisPhaseKey.class)
          .add("fragments", fragments)
          .add("optionsDiff", options)
          .add("multiCpu", multiCpu)
          .add("labels", labels)
          .toString();
    }

    @Override
    public int hashCode() {
      return Objects.hash(fragments, options, multiCpu, labels);
    }

    @Override
    public boolean equals(Object obj) {
      if (this == obj) {
        return true;
      }
      if (!(obj instanceof PrepareAnalysisPhaseKey)) {
        return false;
      }
      PrepareAnalysisPhaseKey other = (PrepareAnalysisPhaseKey) obj;
      return other.fragments.equals(this.fragments)
          && other.options.equals(this.options)
          && other.multiCpu.equals(multiCpu)
          && other.labels.equals(labels);
    }
  }
}
package org.sagebionetworks.repo.model.dbo.persistence;

import static org.sagebionetworks.repo.model.query.jdo.SqlConstants.COL_USER_GROUP_ID;
import static org.sagebionetworks.repo.model.query.jdo.SqlConstants.TABLE_USER_GROUP;

import java.util.Arrays;
import java.util.List;

import org.sagebionetworks.repo.model.dbo.AutoTableMapping;
import org.sagebionetworks.repo.model.dbo.Field;
import org.sagebionetworks.repo.model.dbo.ForeignKey;
import org.sagebionetworks.repo.model.dbo.MigratableDatabaseObject;
import org.sagebionetworks.repo.model.dbo.Table;
import org.sagebionetworks.repo.model.dbo.TableMapping;
import org.sagebionetworks.repo.model.dbo.migration.MigratableTableTranslation;
import org.sagebionetworks.repo.model.migration.MigrationType;
import org.sagebionetworks.repo.model.query.jdo.SqlConstants;

/**
 * Database object (DBO) backing a single row of the quiz-response table. The response body and
 * the passing record are stored as serialized blobs; the remaining columns are scalar metadata
 * used for querying and migration.
 */
@Table(name = SqlConstants.TABLE_QUIZ_RESPONSE)
public class DBOQuizResponse implements MigratableDatabaseObject<DBOQuizResponse, DBOQuizResponse> {

	// Primary key; also serves as the backup id for migration.
	@Field(name = SqlConstants.COL_QUIZ_RESPONSE_ID, backupId = true, primary = true, nullable = false)
	private Long id;

	// Principal that submitted the response; row is removed if the user group is deleted.
	@Field(name = SqlConstants.COL_QUIZ_RESPONSE_CREATED_BY, backupId = false, primary = false, nullable = false)
	@ForeignKey(table = TABLE_USER_GROUP, field = COL_USER_GROUP_ID, cascadeDelete = true)
	private Long createdBy;

	// Creation timestamp, stored as epoch millis.
	@Field(name = SqlConstants.COL_QUIZ_RESPONSE_CREATED_ON, backupId = false, primary = false, nullable = false)
	private Long createdOn;

	@Field(name = SqlConstants.COL_QUIZ_RESPONSE_QUIZ_ID, backupId = false, primary = false, nullable = false)
	private Long quizId;

	@Field(name = SqlConstants.COL_QUIZ_RESPONSE_SCORE, backupId = false, primary = false, nullable = false)
	private Long score;

	@Field(name = SqlConstants.COL_QUIZ_RESPONSE_PASSED, backupId = false, primary = false, nullable = false)
	private Boolean passed;

	// Serialized response payload.
	@Field(name = SqlConstants.COL_QUIZ_RESPONSE_SERIALIZED, backupId = false, primary = false, nullable = false, serialized="mediumblob")
	private byte[] serialized;

	// Serialized passing record derived from the response.
	@Field(name = SqlConstants.COL_QUIZ_RESPONSE_PASSING_RECORD, backupId = false, primary = false, nullable = false, serialized="mediumblob")
	private byte[] passingRecord;

	private static TableMapping<DBOQuizResponse> tableMapping = AutoTableMapping.create(DBOQuizResponse.class);

	@Override
	public TableMapping<DBOQuizResponse> getTableMapping() {
		return tableMapping;
	}

	@Override
	public MigrationType getMigratableTableType() {
		return MigrationType.QUIZ_RESPONSE;
	}

	@Override
	public MigratableTableTranslation<DBOQuizResponse, DBOQuizResponse> getTranslator() {
		// We do not currently have a backup for this object, so the translator is the identity
		// in both directions.
		return new MigratableTableTranslation<DBOQuizResponse, DBOQuizResponse>() {
			@Override
			public DBOQuizResponse createDatabaseObjectFromBackup(DBOQuizResponse backup) {
				return backup;
			}

			@Override
			public DBOQuizResponse createBackupFromDatabaseObject(DBOQuizResponse dbo) {
				return dbo;
			}
		};
	}

	@Override
	public Class<? extends DBOQuizResponse> getBackupClass() {
		return DBOQuizResponse.class;
	}

	@Override
	public Class<? extends DBOQuizResponse> getDatabaseObjectClass() {
		return DBOQuizResponse.class;
	}

	@Override
	public List<MigratableDatabaseObject<?,?>> getSecondaryTypes() {
		// NOTE(review): returns null rather than an empty list; callers in the migration
		// framework appear to tolerate this, so it is intentionally left unchanged.
		return null;
	}

	public Long getId() {
		return id;
	}

	public void setId(Long id) {
		this.id = id;
	}

	public Long getCreatedBy() {
		return createdBy;
	}

	public void setCreatedBy(Long createdBy) {
		this.createdBy = createdBy;
	}

	public Long getCreatedOn() {
		return createdOn;
	}

	public void setCreatedOn(Long createdOn) {
		this.createdOn = createdOn;
	}

	public Long getQuizId() {
		return quizId;
	}

	public void setQuizId(Long quizId) {
		this.quizId = quizId;
	}

	public Long getScore() {
		return score;
	}

	public void setScore(Long score) {
		this.score = score;
	}

	public Boolean getPassed() {
		return passed;
	}

	public void setPassed(Boolean passed) {
		this.passed = passed;
	}

	public byte[] getSerialized() {
		return serialized;
	}

	public void setSerialized(byte[] serialized) {
		this.serialized = serialized;
	}

	public byte[] getPassingRecord() {
		return passingRecord;
	}

	public void setPassingRecord(byte[] passingRecord) {
		this.passingRecord = passingRecord;
	}

	public static void setTableMapping(TableMapping<DBOQuizResponse> tableMapping) {
		DBOQuizResponse.tableMapping = tableMapping;
	}

	// Null-safe equality used by equals(); mirrors java.util.Objects.equals without adding an import.
	private static boolean sameValue(Object a, Object b) {
		return (a == null) ? (b == null) : a.equals(b);
	}

	@Override
	public int hashCode() {
		// Fold the fields in the same order and with the same 31-based accumulation as the
		// classic generated hashCode, so existing hash values are preserved. The boxed ints
		// produced by Arrays.hashCode hash to themselves, keeping the result identical.
		final int prime = 31;
		Object[] parts = {
				createdBy, createdOn, id, passed,
				Arrays.hashCode(passingRecord), quizId, score, Arrays.hashCode(serialized)
		};
		int result = 1;
		for (Object part : parts) {
			result = prime * result + ((part == null) ? 0 : part.hashCode());
		}
		return result;
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj) {
			return true;
		}
		if (obj == null || getClass() != obj.getClass()) {
			return false;
		}
		DBOQuizResponse other = (DBOQuizResponse) obj;
		return sameValue(createdBy, other.createdBy)
				&& sameValue(createdOn, other.createdOn)
				&& sameValue(id, other.id)
				&& sameValue(passed, other.passed)
				&& Arrays.equals(passingRecord, other.passingRecord)
				&& sameValue(quizId, other.quizId)
				&& sameValue(score, other.score)
				&& Arrays.equals(serialized, other.serialized);
	}

	@Override
	public String toString() {
		// Produces the exact same string as the original concatenation-based implementation.
		StringBuilder sb = new StringBuilder("DBOQuizResponse [id=");
		sb.append(id)
				.append(", createdBy=").append(createdBy)
				.append(", createdOn=").append(createdOn)
				.append(", quizId=").append(quizId)
				.append(", score=").append(score)
				.append(", passed=").append(passed)
				.append(", serialized=").append(Arrays.toString(serialized))
				.append(", passingRecord=").append(Arrays.toString(passingRecord))
				.append("]");
		return sb.toString();
	}
}
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.containerregistry.implementation; import retrofit2.Retrofit; import com.google.common.reflect.TypeToken; import com.microsoft.azure.AzureServiceFuture; import com.microsoft.azure.CloudException; import com.microsoft.azure.ListOperationCallback; import com.microsoft.azure.Page; import com.microsoft.azure.PagedList; import com.microsoft.rest.ServiceFuture; import com.microsoft.rest.ServiceResponse; import java.io.IOException; import java.util.List; import okhttp3.ResponseBody; import retrofit2.http.GET; import retrofit2.http.Header; import retrofit2.http.Headers; import retrofit2.http.Query; import retrofit2.http.Url; import retrofit2.Response; import rx.functions.Func1; import rx.Observable; /** * An instance of this class provides access to all the operations defined * in Operations. */ public class OperationsInner { /** The Retrofit service to perform REST calls. */ private OperationsService service; /** The service client containing this operation class. */ private ContainerRegistryManagementClientImpl client; /** * Initializes an instance of OperationsInner. * * @param retrofit the Retrofit instance built from a Retrofit Builder. * @param client the instance of the service client containing this operation class. */ public OperationsInner(Retrofit retrofit, ContainerRegistryManagementClientImpl client) { this.service = retrofit.create(OperationsService.class); this.client = client; } /** * The interface defining all the services for Operations to be * used by Retrofit to perform actually REST calls. 
*/ interface OperationsService { @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.containerregistry.Operations list" }) @GET("providers/Microsoft.ContainerRegistry/operations") Observable<Response<ResponseBody>> list(@Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent); @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.containerregistry.Operations listNext" }) @GET Observable<Response<ResponseBody>> listNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent); } /** * Lists all of the available Azure Container Registry REST API operations. * * @throws IllegalArgumentException thrown if parameters fail the validation * @throws CloudException thrown if the request is rejected by server * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @return the PagedList&lt;OperationDefinitionInner&gt; object if successful. */ public PagedList<OperationDefinitionInner> list() { ServiceResponse<Page<OperationDefinitionInner>> response = listSinglePageAsync().toBlocking().single(); return new PagedList<OperationDefinitionInner>(response.body()) { @Override public Page<OperationDefinitionInner> nextPage(String nextPageLink) { return listNextSinglePageAsync(nextPageLink).toBlocking().single().body(); } }; } /** * Lists all of the available Azure Container Registry REST API operations. * * @param serviceCallback the async ServiceCallback to handle successful and failed responses. 
* @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */ public ServiceFuture<List<OperationDefinitionInner>> listAsync(final ListOperationCallback<OperationDefinitionInner> serviceCallback) { return AzureServiceFuture.fromPageResponse( listSinglePageAsync(), new Func1<String, Observable<ServiceResponse<Page<OperationDefinitionInner>>>>() { @Override public Observable<ServiceResponse<Page<OperationDefinitionInner>>> call(String nextPageLink) { return listNextSinglePageAsync(nextPageLink); } }, serviceCallback); } /** * Lists all of the available Azure Container Registry REST API operations. * * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the PagedList&lt;OperationDefinitionInner&gt; object */ public Observable<Page<OperationDefinitionInner>> listAsync() { return listWithServiceResponseAsync() .map(new Func1<ServiceResponse<Page<OperationDefinitionInner>>, Page<OperationDefinitionInner>>() { @Override public Page<OperationDefinitionInner> call(ServiceResponse<Page<OperationDefinitionInner>> response) { return response.body(); } }); } /** * Lists all of the available Azure Container Registry REST API operations. 
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList&lt;OperationDefinitionInner&gt; object
 */
public Observable<ServiceResponse<Page<OperationDefinitionInner>>> listWithServiceResponseAsync() {
    return listSinglePageAsync()
        .concatMap(new Func1<ServiceResponse<Page<OperationDefinitionInner>>, Observable<ServiceResponse<Page<OperationDefinitionInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<OperationDefinitionInner>>> call(ServiceResponse<Page<OperationDefinitionInner>> page) {
                String nextPageLink = page.body().nextPageLink();
                // A null nextLink marks the final page; stop recursing.
                if (nextPageLink == null) {
                    return Observable.just(page);
                }
                // Emit this page, then lazily fetch and concatenate the remaining pages.
                return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
            }
        });
}

/**
 * Lists all of the available Azure Container Registry REST API operations.
 *
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the PagedList&lt;OperationDefinitionInner&gt; object wrapped in {@link ServiceResponse} if successful.
 */
public Observable<ServiceResponse<Page<OperationDefinitionInner>>> listSinglePageAsync() {
    if (this.client.apiVersion() == null) {
        throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
    }
    return service.list(this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<OperationDefinitionInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<OperationDefinitionInner>>> call(Response<ResponseBody> response) {
                try {
                    ServiceResponse<PageImpl<OperationDefinitionInner>> result = listDelegate(response);
                    return Observable.just(new ServiceResponse<Page<OperationDefinitionInner>>(result.body(), result.response()));
                } catch (Throwable t) {
                    // Surface deserialization/HTTP errors through the observable, not the stack.
                    return Observable.error(t);
                }
            }
        });
}

// Deserializes the raw HTTP response of the first-page call; only HTTP 200 is
// registered as success, everything else becomes a CloudException.
private ServiceResponse<PageImpl<OperationDefinitionInner>> listDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
    return this.client.restClient().responseBuilderFactory().<PageImpl<OperationDefinitionInner>, CloudException>newInstance(this.client.serializerAdapter())
        .register(200, new TypeToken<PageImpl<OperationDefinitionInner>>() { }.getType())
        .registerError(CloudException.class)
        .build(response);
}

/**
 * Lists all of the available Azure Container Registry REST API operations.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the PagedList&lt;OperationDefinitionInner&gt; object if successful.
 */
public PagedList<OperationDefinitionInner> listNext(final String nextPageLink) {
    // Blocks for the first page; later pages are fetched lazily by the PagedList below.
    ServiceResponse<Page<OperationDefinitionInner>> response = listNextSinglePageAsync(nextPageLink).toBlocking().single();
    return new PagedList<OperationDefinitionInner>(response.body()) {
        @Override
        public Page<OperationDefinitionInner> nextPage(String nextPageLink) {
            return listNextSinglePageAsync(nextPageLink).toBlocking().single().body();
        }
    };
}

/**
 * Lists all of the available Azure Container Registry REST API operations.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @param serviceFuture the ServiceFuture object tracking the Retrofit calls
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<List<OperationDefinitionInner>> listNextAsync(final String nextPageLink, final ServiceFuture<List<OperationDefinitionInner>> serviceFuture, final ListOperationCallback<OperationDefinitionInner> serviceCallback) {
    return AzureServiceFuture.fromPageResponse(
        listNextSinglePageAsync(nextPageLink),
        new Func1<String, Observable<ServiceResponse<Page<OperationDefinitionInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<OperationDefinitionInner>>> call(String nextPageLink) {
                return listNextSinglePageAsync(nextPageLink);
            }
        },
        serviceCallback);
}

/**
 * Lists all of the available Azure Container Registry REST API operations.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList&lt;OperationDefinitionInner&gt; object
 */
public Observable<Page<OperationDefinitionInner>> listNextAsync(final String nextPageLink) {
    return listNextWithServiceResponseAsync(nextPageLink)
        .map(new Func1<ServiceResponse<Page<OperationDefinitionInner>>, Page<OperationDefinitionInner>>() {
            @Override
            public Page<OperationDefinitionInner> call(ServiceResponse<Page<OperationDefinitionInner>> response) {
                return response.body();
            }
        });
}

/**
 * Lists all of the available Azure Container Registry REST API operations.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PagedList&lt;OperationDefinitionInner&gt; object
 */
public Observable<ServiceResponse<Page<OperationDefinitionInner>>> listNextWithServiceResponseAsync(final String nextPageLink) {
    return listNextSinglePageAsync(nextPageLink)
        .concatMap(new Func1<ServiceResponse<Page<OperationDefinitionInner>>, Observable<ServiceResponse<Page<OperationDefinitionInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<OperationDefinitionInner>>> call(ServiceResponse<Page<OperationDefinitionInner>> page) {
                String nextPageLink = page.body().nextPageLink();
                if (nextPageLink == null) {
                    return Observable.just(page);
                }
                return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
            }
        });
}

/**
 * Lists all of the available Azure Container Registry REST API operations.
 *
 * @param nextPageLink The NextLink from the previous successful call to List operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the PagedList&lt;OperationDefinitionInner&gt; object wrapped in {@link ServiceResponse} if successful.
 */
public Observable<ServiceResponse<Page<OperationDefinitionInner>>> listNextSinglePageAsync(final String nextPageLink) {
    if (nextPageLink == null) {
        throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
    }
    String nextUrl = String.format("%s", nextPageLink);
    return service.listNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<OperationDefinitionInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<OperationDefinitionInner>>> call(Response<ResponseBody> response) {
                try {
                    ServiceResponse<PageImpl<OperationDefinitionInner>> result = listNextDelegate(response);
                    return Observable.just(new ServiceResponse<Page<OperationDefinitionInner>>(result.body(), result.response()));
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}

// Deserializes the raw HTTP response of a "next page" call; only HTTP 200 is
// registered as success, everything else becomes a CloudException.
private ServiceResponse<PageImpl<OperationDefinitionInner>> listNextDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
    return this.client.restClient().responseBuilderFactory().<PageImpl<OperationDefinitionInner>, CloudException>newInstance(this.client.serializerAdapter())
        .register(200, new TypeToken<PageImpl<OperationDefinitionInner>>() { }.getType())
        .registerError(CloudException.class)
        .build(response);
}
}
/* * Copyright 2019 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.config.materials.svn; import com.thoughtworks.go.config.PasswordEncrypter; import com.thoughtworks.go.config.materials.PasswordAwareMaterial; import com.thoughtworks.go.config.materials.ScmMaterial; import com.thoughtworks.go.config.materials.ScmMaterialConfig; import com.thoughtworks.go.config.materials.SubprocessExecutionContext; import com.thoughtworks.go.domain.MaterialInstance; import com.thoughtworks.go.domain.materials.*; import com.thoughtworks.go.domain.materials.svn.*; import com.thoughtworks.go.security.GoCipher; import com.thoughtworks.go.util.GoConstants; import com.thoughtworks.go.util.command.ConsoleOutputStreamConsumer; import com.thoughtworks.go.util.command.UrlArgument; import org.apache.commons.io.FileUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import static com.thoughtworks.go.util.ExceptionUtils.bombIfNull; import static com.thoughtworks.go.util.FileUtil.createParentFolderIfNotExist; import static java.lang.String.format; /** * @understands configuration for subversion */ public class SvnMaterial extends ScmMaterial implements PasswordEncrypter, PasswordAwareMaterial { private static final Logger LOGGER = LoggerFactory.getLogger(SvnMaterial.class); private UrlArgument url; private 
boolean checkExternals;
    // Lazily-created subversion command wrapper; transient so it is never serialized.
    private transient Subversion svnLazyLoaded;
    public static final String TYPE = "SvnMaterial";

    private SvnMaterial(GoCipher goCipher) {
        super("SvnMaterial", goCipher);
    }

    public SvnMaterial(String url, String userName, String password, boolean checkExternals) {
        this(url, userName, password, checkExternals, new GoCipher());
    }

    public SvnMaterial(Subversion svn) {
        this(svn.getUrl().originalArgument(), svn.getUserName(), svn.getPassword(), svn.isCheckExternals());
        // Reuse the supplied command object instead of lazily creating one in svn().
        this.svnLazyLoaded = svn;
    }

    public SvnMaterial(String url, String userName, String password, boolean checkExternals, String folder) {
        this(url, userName, password, checkExternals);
        this.folder = folder;
    }

    /** Builds a material from its persisted/config representation, copying all config flags. */
    public SvnMaterial(SvnMaterialConfig config) {
        this(config.getUrl(), config.getUserName(), config.getPassword(), config.isCheckExternals(), config.getGoCipher());
        this.autoUpdate = config.getAutoUpdate();
        this.filter = config.rawFilter();
        this.invertFilter = config.getInvertFilter();
        this.folder = config.getFolder();
        this.name = config.getName();
    }

    // Designated constructor: every other constructor eventually delegates here.
    public SvnMaterial(String url, String userName, String password, boolean checkExternals, GoCipher goCipher) {
        super("SvnMaterial", goCipher);
        bombIfNull(url, "null url");
        setUrl(url);
        this.userName = userName;
        setPassword(password);
        this.checkExternals = checkExternals;
    }

    @Override
    public MaterialConfig config() {
        return new SvnMaterialConfig(url, userName, getPassword(), checkExternals, goCipher, autoUpdate, filter, invertFilter, folder, name);
    }

    // Returns the cached Subversion command, recreating it if the URL has changed
    // since it was built (e.g. after setUrl).
    private Subversion svn() {
        if (svnLazyLoaded == null || !svnLazyLoaded.getUrl().equals(url)) {
            svnLazyLoaded = new SvnCommand(getFingerprint(), url.forCommandLine(), userName, passwordForCommandLine(), checkExternals);
        }
        return svnLazyLoaded;
    }

    // baseDir/execCtx are unused: svn can query the remote repository directly.
    public List<Modification> latestModification(File baseDir, final SubprocessExecutionContext execCtx) {
        return svn().latestModification();
    }

    public List<Modification> modificationsSince(File workingDirectory, Revision revision, final SubprocessExecutionContext execCtx) {
        return svn().modificationsSince(new SubversionRevision(revision.getRevision()));
    }

    public MaterialInstance createMaterialInstance() {
        return new SvnMaterialInstance(url.originalArgument(), userName, UUID.randomUUID().toString(), checkExternals);
    }

    @Override
    protected void appendCriteria(Map parameters) {
        parameters.put(ScmMaterialConfig.URL, url.originalArgument());
        parameters.put(ScmMaterialConfig.USERNAME, userName);
        parameters.put("checkExternals", checkExternals);
    }

    @Override
    protected void appendAttributes(Map parameters) {
        // NOTE(review): unlike appendCriteria this stores the UrlArgument itself,
        // not url.originalArgument() — presumably intentional; confirm against callers.
        parameters.put(ScmMaterialConfig.URL, url);
        parameters.put(ScmMaterialConfig.USERNAME, userName);
        parameters.put("checkExternals", checkExternals);
    }

    /**
     * Brings the working directory to the requested revision, doing a fresh checkout
     * when the directory is missing or points at a different repository URL, and a
     * cleanup+update otherwise.
     */
    public void updateTo(ConsoleOutputStreamConsumer outputStreamConsumer, File baseDir, RevisionContext revisionContext, final SubprocessExecutionContext execCtx) {
        Revision revision = revisionContext.getLatestRevision();
        File workingDir = execCtx.isServer() ? baseDir : workingdir(baseDir);
        LOGGER.debug("Updating to revision: {} in workingdirectory {}", revision, workingDir);
        outputStreamConsumer.stdOutput(format("[%s] Start updating %s at revision %s from %s", GoConstants.PRODUCT_NAME, updatingTarget(), revision.getRevision(), url));
        boolean shouldDoFreshCheckout = !workingDir.isDirectory() || isRepositoryChanged(workingDir);
        if (shouldDoFreshCheckout) {
            freshCheckout(outputStreamConsumer, new SubversionRevision(revision), workingDir);
        } else {
            cleanupAndUpdate(outputStreamConsumer, new SubversionRevision(revision), workingDir);
        }
        LOGGER.debug("done with update");
        outputStreamConsumer.stdOutput(format("[%s] Done.\n", GoConstants.PRODUCT_NAME));
    }

    // Returns true when the folder is not a checkout of this material's URL
    // (missing .svn dir, different repository URL, or the URL query fails).
    public boolean isRepositoryChanged(File workingFolder) {
        try {
            File file = new File(workingFolder, ".svn");
            if (workingFolder.isDirectory() && file.exists() && file.isDirectory()) {
                String workingUrl = svn().workingRepositoryUrl(workingFolder);
                return !MaterialUrl.sameUrl(url.toString(), workingUrl);
            } else {
                return true;
            }
        } catch (IOException e) {
            // Treat an unreadable working copy as "changed" so a fresh checkout is forced.
            return true;
        }
    }

    /** Deletes any existing working folder and checks out the given revision from scratch. */
    public void freshCheckout(ConsoleOutputStreamConsumer outputStreamConsumer, SubversionRevision revision, File workingFolder) {
        if (workingFolder.isDirectory()) {
            FileUtils.deleteQuietly(workingFolder);
        }
        LOGGER.trace("Checking out to revision {} in {}", revision, workingFolder);
        createParentFolderIfNotExist(workingFolder);
        svn().checkoutTo(outputStreamConsumer, workingFolder, revision);
    }

    /** Reverts local changes (best effort), then updates the working copy to the revision. */
    public void cleanupAndUpdate(ConsoleOutputStreamConsumer outputStreamConsumer, SubversionRevision revision, File workingFolder) {
        try {
            svn().cleanupAndRevert(outputStreamConsumer, workingFolder);
        } catch (Exception e) {
            // Deliberate best-effort: a failed revert is logged and the update still runs.
            String message = "Failed to do cleanup and revert in " + workingFolder.getAbsolutePath();
            LOGGER.error(message);
            LOGGER.debug(message, e);
        }
        LOGGER.trace("Updating to revision {} on {}", revision, workingFolder);
        svn().updateTo(outputStreamConsumer, workingFolder, revision);
    }

    // Identity is super.equals() plus url, userName and checkExternals.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        if (!super.equals(o)) {
            return false;
        }

        SvnMaterial that = (SvnMaterial) o;

        if (checkExternals != that.checkExternals) {
            return false;
        }
        if (url != null ? !url.equals(that.url) : that.url != null) {
            return false;
        }
        if (userName != null ? !userName.equals(that.userName) : that.userName != null) {
            return false;
        }

        return true;
    }

    @Override
    public int hashCode() {
        int result = super.hashCode();
        result = 31 * result + (url != null ? url.hashCode() : 0);
        result = 31 * result + (userName != null ? userName.hashCode() : 0);
        result = 31 * result + (checkExternals ? 1 : 0);
        return result;
    }

    protected String getLocation() {
        return url == null ? null : url.forDisplay();
    }

    public String getTypeForDisplay() {
        return "Subversion";
    }

    /**
     * Serializes this material for API/UI consumption.
     *
     * @param addSecureFields when true the raw command-line URL and password are included;
     *                        otherwise only the display-safe URL is exposed
     */
    @Override
    public Map<String, Object> getAttributes(boolean addSecureFields) {
        Map<String, Object> materialMap = new HashMap<>();
        materialMap.put("type", "svn");
        Map<String, Object> configurationMap = new HashMap<>();
        if (addSecureFields) {
            configurationMap.put("url", url.forCommandLine());
            configurationMap.put("password", getPassword());
        } else {
            configurationMap.put("url", url.forDisplay());
        }
        configurationMap.put("username", userName);
        configurationMap.put("check-externals", checkExternals);
        materialMap.put("svn-configuration", configurationMap);
        return materialMap;
    }

    public Class getInstanceType() {
        return SvnMaterialInstance.class;
    }

    public ValidationBean checkConnection(final SubprocessExecutionContext execCtx) {
        return svn().checkConnection();
    }

    @Override
    public String getUrl() {
        return url == null ? null : url.originalArgument();
    }

    @Override
    public String urlForCommandLine() {
        return url.forCommandLine();
    }

    @Override
    public UrlArgument getUrlArgument() {
        return url;
    }

    public String getLongDescription() {
        return String.format("URL: %s, Username: %s, CheckExternals: %s", url.forDisplay(), userName, checkExternals);
    }

    public void setUrl(String url) {
        this.url = new UrlArgument(url);
    }

    public boolean isCheckExternals() {
        return checkExternals;
    }

    // Prefixes an external's folder with this material's destination folder, if any.
    private String folderFor(String folderForExternal) {
        return getFolder() == null ? folderForExternal : getFolder() + "/" + folderForExternal;
    }

    public void add(ConsoleOutputStreamConsumer outputStreamConsumer, File file) {
        svn().add(outputStreamConsumer, file);
    }

    public void commit(ConsoleOutputStreamConsumer outputStreamConsumer, File workingDir, String message) {
        svn().commit(outputStreamConsumer, workingDir, message);
    }

    // NOTE(review): the regex is given a leading "/" when missing and matched against
    // the full name via String.matches (i.e. anchored match) — confirm callers expect that.
    @Override
    public boolean matches(String name, String regex) {
        if (!regex.startsWith("/")) {
            regex = "/" + regex;
        }
        return name.matches(regex);
    }

    @Override
    public String toString() {
        return "SvnMaterial{" +
                "url=" + url +
                ", userName='" + userName + '\'' +
                ", checkExternals=" + checkExternals +
                '}';
    }

    /**
     * @deprecated used only in tests - we need to disentangle this
     */
    public static SvnMaterial createSvnMaterialWithMock(Subversion svn) {
        return new SvnMaterial(svn);
    }
}
/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the "License"). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ package alluxio.worker.grpc; import alluxio.AlluxioURI; import alluxio.Constants; import alluxio.conf.PropertyKey; import alluxio.conf.ServerConfiguration; import alluxio.exception.BlockDoesNotExistException; import alluxio.exception.status.AlluxioStatusException; import alluxio.exception.status.InvalidArgumentException; import alluxio.grpc.Chunk; import alluxio.grpc.DataMessage; import alluxio.grpc.ReadResponse; import alluxio.metrics.MetricInfo; import alluxio.metrics.MetricKey; import alluxio.metrics.MetricsSystem; import alluxio.network.protocol.databuffer.DataBuffer; import alluxio.network.protocol.databuffer.NettyDataBuffer; import alluxio.resource.LockResource; import alluxio.security.authentication.AuthenticatedUserInfo; import alluxio.util.LogUtils; import alluxio.util.logging.SamplingLogger; import alluxio.wire.BlockReadRequest; import alluxio.worker.block.BlockWorker; import alluxio.worker.block.UnderFileSystemBlockReader; import alluxio.worker.block.io.BlockReader; import com.codahale.metrics.Counter; import com.codahale.metrics.Meter; import com.google.common.base.Preconditions; import com.google.protobuf.UnsafeByteOperations; import io.grpc.Status; import io.grpc.StatusRuntimeException; import io.grpc.internal.SerializingExecutor; import io.grpc.stub.CallStreamObserver; import io.grpc.stub.StreamObserver; import io.netty.buffer.ByteBuf; import io.netty.buffer.PooledByteBufAllocator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import 
java.util.concurrent.Executor; import java.util.concurrent.ExecutorService; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.locks.ReentrantLock; import javax.annotation.Nullable; import javax.annotation.concurrent.GuardedBy; import javax.annotation.concurrent.NotThreadSafe; /** * This class handles {@link BlockReadRequest}s. * * Protocol: Check {@link alluxio.client.block.stream.GrpcDataReader} for additional information. * 1. Once a read request is received, the handler creates a {@link DataReader} which reads * chunks of data from the block worker and pushes them to the buffer. * 2. The {@link DataReader} pauses if there are too many packets in flight, and resumes if there * is room available. * 3. The channel is closed if there is any exception during the data read/write. * * Threading model: * Only two threads are involved at a given point of time: gRPC event thread, data reader thread. * 1. The gRPC event thread accepts the read request, handles write callbacks. If any exception * occurs (e.g. failed to read from stream or respond to stream) or the read request is cancelled * by the client, the gRPC event thread notifies the data reader thread. * 2. The data reader thread keeps reading from the file and writes to buffer. Before reading a * new data chunk, it checks whether there are notifications (e.g. cancel, error), if * there is, handle them properly. See more information about the notifications in the javadoc * of {@link BlockReadRequestContext} about CANCEL, EOF, and ERROR flags. 
 *
 * @see BlockReadRequestContext
 */
@NotThreadSafe
public class BlockReadHandler implements StreamObserver<alluxio.grpc.ReadRequest> {
  private static final Logger LOG = LoggerFactory.getLogger(BlockReadHandler.class);
  /** Upper bound on the size of a single data chunk queued for the client. */
  private static final long MAX_CHUNK_SIZE =
      ServerConfiguration.getBytes(PropertyKey.WORKER_NETWORK_READER_MAX_CHUNK_SIZE_BYTES);
  /** Max bytes queued but not yet acknowledged by the client before the reader pauses. */
  private static final long MAX_BYTES_IN_FLIGHT =
      ServerConfiguration.getBytes(PropertyKey.WORKER_NETWORK_READER_BUFFER_SIZE_BYTES);
  /** Sampling logger so slow-buffer warnings are emitted at most once per minute. */
  private static final Logger SLOW_BUFFER_LOG = new SamplingLogger(LOG, Constants.MINUTE_MS);
  /** Duration (ms) above which acquiring a data buffer is reported as slow. */
  private static final long SLOW_BUFFER_MS =
      ServerConfiguration.getMs(PropertyKey.WORKER_REMOTE_IO_SLOW_THRESHOLD);

  /** The executor to run {@link DataReader}. */
  private final ExecutorService mDataReaderExecutor;
  /** A serializing executor for sending responses. */
  private Executor mSerializingExecutor;
  /** The Block Worker. */
  private final BlockWorker mWorker;
  /** Guards reads/writes of the flags and positions inside {@link #mContext}. */
  private final ReentrantLock mLock = new ReentrantLock();
  /** Whether this stream is served over a domain socket (selects which metrics are used). */
  private final boolean mDomainSocketEnabled;
  /** Authenticated user of this stream. NOTE(review): not read within this class as visible here. */
  private final AuthenticatedUserInfo mUserInfo;

  /**
   * This is only created in the gRPC event thread when a read request is received.
   * Using "volatile" because we want any value change of this variable to be
   * visible across both gRPC and I/O threads, meanwhile no atomicity of operation is assumed.
   */
  private volatile BlockReadRequestContext mContext;
  /** Observer used to push {@link ReadResponse}s back to the client. */
  private final StreamObserver<ReadResponse> mResponseObserver;

  /**
   * Creates an instance of {@link BlockReadHandler}.
* * @param executorService the executor service to run {@link DataReader}s * @param blockWorker block worker * @param responseObserver the response observer of the * @param userInfo the authenticated user info * @param domainSocketEnabled if domain socket is enabled */ BlockReadHandler(ExecutorService executorService, BlockWorker blockWorker, StreamObserver<ReadResponse> responseObserver, AuthenticatedUserInfo userInfo, boolean domainSocketEnabled) { mDataReaderExecutor = executorService; mResponseObserver = responseObserver; mUserInfo = userInfo; mSerializingExecutor = new SerializingExecutor(GrpcExecutors.BLOCK_READER_SERIALIZED_RUNNER_EXECUTOR); mWorker = blockWorker; mDomainSocketEnabled = domainSocketEnabled; } @Override public void onNext(alluxio.grpc.ReadRequest request) { // Expected state: context equals null as this handler is new for request. // Otherwise, notify the client an illegal state. Note that, we reset the context before // validation msg as validation may require to update error in context. 
LOG.debug("Received read request {}.", request); try (LockResource lr = new LockResource(mLock)) { if (request.hasOffsetReceived()) { mContext.setPosReceived(request.getOffsetReceived()); if (!tooManyPendingChunks()) { onReady(); } return; } Preconditions.checkState(mContext == null || !mContext.isDataReaderActive()); mContext = createRequestContext(request); validateReadRequest(request); mContext.setPosToQueue(mContext.getRequest().getStart()); mContext.setPosReceived(mContext.getRequest().getStart()); mDataReaderExecutor.submit(createDataReader(mContext, mResponseObserver)); mContext.setDataReaderActive(true); } catch (RejectedExecutionException e) { handleStreamEndingException(Status.RESOURCE_EXHAUSTED.withCause(e) .withDescription("Failed to create a new data reader")); } catch (Exception e) { handleStreamEndingException( AlluxioStatusException.fromThrowable(e).toGrpcStatusException().getStatus()); } } /** * Handles any exception which should abort the client's read request. * * @param status the type of {@link Status} exception which should be returned to the user */ private void handleStreamEndingException(Status status) { Long sessionId = mContext.getRequest() == null ? -1 : mContext.getRequest().getSessionId(); LogUtils.warnWithException(LOG, "Error occurred while handling read. sessionId: {}. Ending " + "stream", sessionId, status); AlluxioStatusException statusExc = AlluxioStatusException.from(status); try (LockResource lr = new LockResource(mLock)) { if (mContext == null) { mContext = createRequestContext(alluxio.grpc.ReadRequest.newBuilder().build()); } setError(new Error(statusExc, true)); } } /** * @return true if there are too many chunks in-flight */ @GuardedBy("mLock") public boolean tooManyPendingChunks() { return mContext.getPosToQueue() - mContext.getPosReceived() >= MAX_BYTES_IN_FLIGHT; } @Override public void onError(Throwable cause) { BlockReadRequest r = mContext == null ? 
null : mContext.getRequest(); LogUtils.warnWithException(LOG, "Exception occurred while processing read request onError " + "sessionId: {}, {}", r, r == null ? null : r.getSessionId(), cause); setError(new Error(AlluxioStatusException.fromThrowable(cause), false)); } @Override public void onCompleted() { setCancel(); } /** * Validates a read request. * * @param request the block read request * @throws InvalidArgumentException if the request is invalid */ private void validateReadRequest(alluxio.grpc.ReadRequest request) throws InvalidArgumentException { if (request.getBlockId() < 0) { throw new InvalidArgumentException( String.format("Invalid blockId (%d) in read request.", request.getBlockId())); } if (request.getOffset() < 0 || request.getLength() <= 0) { throw new InvalidArgumentException( String.format("Invalid read bounds in read request %s.", request.toString())); } } /** * @param error the error */ private void setError(Error error) { Preconditions.checkNotNull(error, "error"); try (LockResource lr = new LockResource(mLock)) { if (mContext == null || mContext.getError() != null || mContext.isDoneUnsafe()) { // Note, we may reach here via channelUnregistered due to network errors bubbling up before // mContext is initialized, or channel garbage collection after the request is finished. 
return; } mContext.setError(error); if (!mContext.isDataReaderActive()) { mContext.setDataReaderActive(true); createDataReader(mContext, mResponseObserver).run(); } } } private void setEof() { try (LockResource lr = new LockResource(mLock)) { if (mContext == null || mContext.getError() != null || mContext.isCancel() || mContext.isEof()) { return; } mContext.setEof(true); if (!mContext.isDataReaderActive()) { mContext.setDataReaderActive(true); createDataReader(mContext, mResponseObserver).run(); } } } private void setCancel() { try (LockResource lr = new LockResource(mLock)) { if (mContext == null || mContext.getError() != null || mContext.isEof() || mContext.isCancel()) { return; } mContext.setCancel(true); if (!mContext.isDataReaderActive()) { mContext.setDataReaderActive(true); createDataReader(mContext, mResponseObserver).run(); } } } /** * @param request the block read request * @return an instance of read request based on the request read from channel */ protected BlockReadRequestContext createRequestContext(alluxio.grpc.ReadRequest request) { BlockReadRequestContext context = new BlockReadRequestContext(request); if (mDomainSocketEnabled) { context.setCounter(MetricsSystem.counter(MetricKey.WORKER_BYTES_READ_DOMAIN.getName())); context.setMeter(MetricsSystem .meter(MetricKey.WORKER_BYTES_READ_DOMAIN_THROUGHPUT.getName())); } else { context.setCounter(MetricsSystem.counter(MetricKey.WORKER_BYTES_READ_REMOTE.getName())); context.setMeter(MetricsSystem .meter(MetricKey.WORKER_BYTES_READ_REMOTE_THROUGHPUT.getName())); } return context; } /** * Creates a read reader. * * @param context read request context * @param response channel * @return the data reader for this handler */ private DataReader createDataReader(BlockReadRequestContext context, StreamObserver<ReadResponse> response) { return new DataReader(context, response); } /** * Ready to restart data reader. 
 */
public void onReady() {
  try (LockResource lr = new LockResource(mLock)) {
    if (shouldRestartDataReader()) {
      try {
        mDataReaderExecutor.submit(createDataReader(mContext, mResponseObserver));
        mContext.setDataReaderActive(true);
      } catch (RejectedExecutionException e) {
        handleStreamEndingException(Status.RESOURCE_EXHAUSTED.withCause(e)
            .withDescription("Failed to create a new data reader"));
      }
    }
  }
}

/**
 * @return true if we should restart the data reader
 */
@GuardedBy("mLock")
private boolean shouldRestartDataReader() {
  // Restart only when a request exists, no reader is running, there is still data
  // to queue, and no terminal flag (error/cancel/eof) has been raised.
  return mContext != null && !mContext.isDataReaderActive()
      && mContext.getPosToQueue() < mContext.getRequest().getEnd()
      && mContext.getError() == null && !mContext.isCancel() && !mContext.isEof();
}

/**
 * Bumps the per-request byte counter and throughput meter.
 *
 * @param bytesRead bytes read
 */
private void incrementMetrics(long bytesRead) {
  Counter counter = mContext.getCounter();
  Meter meter = mContext.getMeter();
  Preconditions.checkState(counter != null);
  counter.inc(bytesRead);
  meter.mark(bytesRead);
}

/**
 * A runnable that reads data and writes them to the channel.
 */
private class DataReader implements Runnable {
  /** The response observer, narrowed so isReady() can be used for flow control. */
  private final CallStreamObserver<ReadResponse> mResponse;
  /** The context of the request to complete. */
  private final BlockReadRequestContext mContext;
  /** The request being served; immutable for the life of this reader. */
  private final BlockReadRequest mRequest;
  /** Chunk size for this request, capped at MAX_CHUNK_SIZE. */
  private final long mChunkSize;

  /**
   * Creates an instance of the {@link DataReader}.
   *
   * @param context context of the request to complete
   * @param response the response
   */
  DataReader(BlockReadRequestContext context, StreamObserver<ReadResponse> response) {
    mContext = Preconditions.checkNotNull(context);
    mRequest = Preconditions.checkNotNull(context.getRequest());
    mChunkSize = Math.min(mRequest.getChunkSize(), MAX_CHUNK_SIZE);
    mResponse = (CallStreamObserver<ReadResponse>) response;
  }

  @Override
  public void run() {
    try {
      runInternal();
    } catch (Throwable e) {
      LOG.error("Failed to run DataReader.", e);
      throw new RuntimeException(e);
    }
  }

  // Main read loop: under mLock, snapshot the position/flags and decide whether to
  // continue, pause (too many in-flight chunks / stream not ready) or stop; outside
  // the lock, read one chunk and hand it to the serializing executor for sending.
  private void runInternal() {
    boolean eof;  // End of file. Everything requested has been read.
    boolean cancel;
    Error error;  // error occurred, abort requested.
    while (true) {
      final long start;
      final int chunkSize;
      try (LockResource lr = new LockResource(mLock)) {
        if (mContext.isDoneUnsafe()) {
          // The stream was already finished elsewhere; nothing left to do.
          return;
        }
        start = mContext.getPosToQueue();
        eof = mContext.isEof();
        cancel = mContext.isCancel();
        error = mContext.getError();
        if (eof || cancel || error != null
            || (!mResponse.isReady() && tooManyPendingChunks())) {
          // Pause or terminate: mark inactive so onReady()/setError() can restart us.
          mContext.setDataReaderActive(false);
          break;
        }
        chunkSize = (int) Math.min(mRequest.getEnd() - mContext.getPosToQueue(), mChunkSize);
        // chunkSize should always be > 0 here when reaches here.
        Preconditions.checkState(chunkSize > 0);
      }
      DataBuffer chunk = null;
      try {
        // Once we get the data buffer, the lock on the block has been acquired.
        // If there are any stream errors during this time, we must unlock the block
        // before exiting.
        chunk = getDataBuffer(mContext, start, chunkSize);
        if (chunk != null) {
          try (LockResource lr = new LockResource(mLock)) {
            mContext.setPosToQueue(mContext.getPosToQueue() + chunk.getLength());
          }
        }
        if (chunk == null || chunk.getLength() < chunkSize || start + chunkSize == mRequest
            .getEnd()) {
          // This can happen if the requested read length is greater than the actual length of the
          // block or file starting from the given offset.
          setEof();
        }
        if (chunk != null) {
          DataBuffer finalChunk = chunk;
          // Send asynchronously; the serializing executor preserves chunk order.
          mSerializingExecutor.execute(() -> {
            try {
              ReadResponse response = ReadResponse.newBuilder().setChunk(Chunk.newBuilder()
                  .setData(UnsafeByteOperations.unsafeWrap(finalChunk.getReadOnlyByteBuffer()))
              ).build();
              if (mResponse instanceof DataMessageServerStreamObserver) {
                // Zero-copy path: pass the buffer alongside the proto message.
                ((DataMessageServerStreamObserver<ReadResponse>) mResponse)
                    .onNext(new DataMessage<>(response, finalChunk));
              } else {
                mResponse.onNext(response);
              }
              incrementMetrics(finalChunk.getLength());
            } catch (Exception e) {
              LogUtils.warnWithException(LOG,
                  "Exception occurred while sending data for read request {}.",
                  mContext.getRequest(), e);
              setError(new Error(AlluxioStatusException.fromThrowable(e), true));
            } finally {
              // Always release the buffer, whether or not the send succeeded.
              finalChunk.release();
            }
          });
        }
      } catch (Exception e) {
        LogUtils.warnWithException(LOG,
            "Exception occurred while reading data for read request {}. session {}",
            mContext.getRequest(), mContext.getRequest().getSessionId(), e);
        setError(new Error(AlluxioStatusException.fromThrowable(e), true));
      }
      continue;  // NOTE(review): redundant (end of loop body); kept for byte-identical logic.
    }
    // Past the loop: a terminal flag was observed. Close the block reader and reply.
    if (error != null) {
      try {
        completeRequest(mContext);
      } catch (Exception e) {
        LOG.error("Failed to close the request.", e);
      }
      replyError(error);
    } else if (eof || cancel) {
      try {
        completeRequest(mContext);
      } catch (Exception e) {
        LogUtils.warnWithException(LOG, "Exception occurred while completing read request, "
            + "EOF/CANCEL sessionId: {}. {}", mContext.getRequest().getSessionId(),
            mContext.getRequest(), e);
        setError(new Error(AlluxioStatusException.fromThrowable(e), true));
      }
      if (eof) {
        replyEof();
      } else {
        replyCancel();
      }
    }
  }

  /**
   * Completes the read request. When the request is closed, we should clean up any temporary
   * state it may have accumulated.
 *
 * @param context context of the request to complete
 */
private void completeRequest(BlockReadRequestContext context) throws Exception {
  // Close the reader to release the underlying block resources. Clear the reference in a
  // finally so a failed close() is not retried later against a stale reader.
  BlockReader reader = context.getBlockReader();
  try {
    if (reader != null) {
      reader.close();
    }
  } finally {
    context.setBlockReader(null);
  }
}

/**
 * Returns the appropriate {@link DataBuffer} representing the data to send, depending on the
 * configurable transfer type.
 *
 * @param context context of the request to complete
 * @param offset start offset of the read — NOTE(review): this parameter is not referenced in
 *        this implementation; the reader appears to advance its own position. TODO confirm.
 * @param len The length, in bytes, of the data to read from the block
 * @return a {@link DataBuffer} representing the data
 */
protected DataBuffer getDataBuffer(BlockReadRequestContext context, long offset, int len)
    throws Exception {
  @Nullable BlockReader blockReader = null;
  // timings
  long openMs = -1;
  long transferMs = -1;
  long startMs = System.currentTimeMillis();
  try {
    openBlock(context);
    openMs = System.currentTimeMillis() - startMs;
    blockReader = context.getBlockReader();
    Preconditions.checkState(blockReader != null);
    // Fixed-capacity pooled buffer of exactly len bytes; transferTo is looped because a
    // single call may move fewer bytes than requested (-1 signals end of data).
    ByteBuf buf = PooledByteBufAllocator.DEFAULT.buffer(len, len);
    try {
      long startTransferMs = System.currentTimeMillis();
      while (buf.writableBytes() > 0 && blockReader.transferTo(buf) != -1) {
      }
      transferMs = System.currentTimeMillis() - startTransferMs;
      return new NettyDataBuffer(buf);
    } catch (Throwable e) {
      // The buffer is pooled; it must be released on any failure or it leaks.
      buf.release();
      throw e;
    }
  } finally {
    // Sampling diagnostics: log reads whose total buffer acquisition exceeded the threshold.
    long durationMs = System.currentTimeMillis() - startMs;
    if (durationMs >= SLOW_BUFFER_MS) {
      // This buffer took much longer than expected
      String prefix = String
          .format("Getting buffer for remote read took longer than %s ms. ", SLOW_BUFFER_MS)
          + "reader: " + (blockReader == null ? "null" : blockReader.getClass().getName());
      String location = blockReader == null ? "null" : blockReader.getLocation();
      // Do not template the reader class, so the sampling log can distinguish between
      // different reader types
      SLOW_BUFFER_LOG.warn(prefix
          + " location: {} bytes: {} openMs: {} transferMs: {} durationMs: {}",
          location, len, openMs, transferMs, durationMs);
    }
  }
}

/**
 * Opens the block if it is not open.
 *
 * @param context context holding the request and (possibly) an already-open reader
 * @throws Exception if it fails to open the block
 */
private void openBlock(BlockReadRequestContext context) throws Exception {
  if (context.getBlockReader() != null) {
    // A reader is already attached to this request; nothing to do.
    return;
  }
  BlockReadRequest request = context.getRequest();
  // TODO(calvin): Update the locking logic so this can be done better
  if (request.isPromote()) {
    // Promotion (moving the block to tier 0) is best-effort: failures are logged, never fatal.
    try {
      mWorker.moveBlock(request.getSessionId(), request.getId(), 0);
    } catch (BlockDoesNotExistException e) {
      LOG.debug("Block {} to promote does not exist in Alluxio: {}", request.getId(),
          e.getMessage());
    } catch (Exception e) {
      LOG.warn("Failed to promote block {}: {}", request.getId(), e.getMessage());
    }
  }
  BlockReader reader = mWorker.createBlockReader(request);
  context.setBlockReader(reader);
  if (reader instanceof UnderFileSystemBlockReader) {
    // UFS-backed reads are metered separately, tagged with the escaped UFS mount point.
    AlluxioURI ufsMountPointUri = ((UnderFileSystemBlockReader) reader).getUfsMountPointUri();
    String ufsString = MetricsSystem.escape(ufsMountPointUri);
    MetricKey counterKey = MetricKey.WORKER_BYTES_READ_UFS;
    MetricKey meterKey = MetricKey.WORKER_BYTES_READ_UFS_THROUGHPUT;
    context.setCounter(MetricsSystem.counterWithTags(counterKey.getName(),
        counterKey.isClusterAggregated(), MetricInfo.TAG_UFS, ufsString));
    context.setMeter(MetricsSystem.meterWithTags(meterKey.getName(),
        meterKey.isClusterAggregated(), MetricInfo.TAG_UFS, ufsString));
  }
}

/**
 * Writes an error read response to the channel and closes the channel after that.
 *
 * @param error the error to send to the client
 */
private void replyError(Error error) {
  // All stream writes go through the serializing executor to preserve response ordering.
  mSerializingExecutor.execute(() -> {
    try {
      if (!mContext.isDoneUnsafe()) {
        mResponse.onError(error.getCause().toGrpcStatusException());
        mContext.setDoneUnsafe(true);
      } else {
        LOG.debug("Tried to replyError when stream was already completed. context: {}",
            mContext);
      }
    } catch (StatusRuntimeException e) {
      // Ignores the error when client already closed the stream.
      if (e.getStatus().getCode() != Status.Code.CANCELLED) {
        throw e;
      }
    }
  });
}

/**
 * Writes a success response.
 */
private void replyEof() {
  mSerializingExecutor.execute(() -> {
    try {
      if (!mContext.isDoneUnsafe()) {
        mContext.setDoneUnsafe(true);
        mResponse.onCompleted();
      } else {
        LOG.debug("Tried to replyEof when stream was already finished. context: {}",
            mContext);
      }
    } catch (StatusRuntimeException e) {
      // Ignore CANCELLED: the client already closed the stream.
      if (e.getStatus().getCode() != Status.Code.CANCELLED) {
        throw e;
      }
    }
  });
}

/**
 * Writes a cancel response.
 */
private void replyCancel() {
  // NOTE(review): a cancel is acknowledged with onCompleted(), same as EOF — presumably the
  // request-level distinction was already conveyed upstream. Confirm against the protocol.
  mSerializingExecutor.execute(() -> {
    try {
      if (!mContext.isDoneUnsafe()) {
        mContext.setDoneUnsafe(true);
        mResponse.onCompleted();
      } else {
        LOG.debug("Tried to replyCancel when stream was already finished. context: {}",
            mContext);
      }
    } catch (StatusRuntimeException e) {
      // Ignore CANCELLED: the client already closed the stream.
      if (e.getStatus().getCode() != Status.Code.CANCELLED) {
        throw e;
      }
    }
  });
}
}
}
/** * Licensed to the Sakai Foundation (SF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The SF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package org.sakaiproject.nakamura.jaxrs; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import java.util.Enumeration; import java.util.Set; import javax.servlet.RequestDispatcher; import javax.servlet.Servlet; import javax.servlet.ServletContext; import javax.servlet.ServletException; /** * */ public class ServletContextWrapper implements ServletContext { /** * */ private ServletContext delegate; /** * @param delegate */ ServletContextWrapper(ServletContext delegate) { this.delegate = delegate; } /** * {@inheritDoc} * @see javax.servlet.ServletContext#getAttribute(java.lang.String) */ public Object getAttribute(String arg0) { return delegate.getAttribute(arg0); } /** * {@inheritDoc} * @see javax.servlet.ServletContext#getAttributeNames() */ @SuppressWarnings("unchecked") public Enumeration getAttributeNames() { return delegate.getAttributeNames(); } /** * {@inheritDoc} * @see javax.servlet.ServletContext#getContext(java.lang.String) */ public ServletContext getContext(String arg0) { return delegate.getContext(arg0); } /** * {@inheritDoc} * @see javax.servlet.ServletContext#getContextPath() */ public String getContextPath() { return delegate.getContextPath(); } /** * 
{@inheritDoc} * @see javax.servlet.ServletContext#getInitParameter(java.lang.String) */ public String getInitParameter(String key) { if ("resteasy.servlet.mapping.prefix".equalsIgnoreCase(key)) { return ResteasyServlet.SERVLET_PATH; } else { return delegate.getInitParameter(key); } } /** * {@inheritDoc} * @see javax.servlet.ServletContext#getInitParameterNames() */ @SuppressWarnings("unchecked") public Enumeration getInitParameterNames() { return delegate.getInitParameterNames(); } /** * {@inheritDoc} * @see javax.servlet.ServletContext#getMajorVersion() */ public int getMajorVersion() { return delegate.getMajorVersion(); } /** * {@inheritDoc} * @see javax.servlet.ServletContext#getMimeType(java.lang.String) */ public String getMimeType(String arg0) { return delegate.getMimeType(arg0); } /** * {@inheritDoc} * @see javax.servlet.ServletContext#getMinorVersion() */ public int getMinorVersion() { return delegate.getMinorVersion(); } /** * {@inheritDoc} * @see javax.servlet.ServletContext#getNamedDispatcher(java.lang.String) */ public RequestDispatcher getNamedDispatcher(String arg0) { return delegate.getNamedDispatcher(arg0); } /** * {@inheritDoc} * @see javax.servlet.ServletContext#getRealPath(java.lang.String) */ public String getRealPath(String arg0) { return delegate.getRealPath(arg0); } /** * {@inheritDoc} * @see javax.servlet.ServletContext#getRequestDispatcher(java.lang.String) */ public RequestDispatcher getRequestDispatcher(String arg0) { return delegate.getRequestDispatcher(arg0); } /** * {@inheritDoc} * @see javax.servlet.ServletContext#getResource(java.lang.String) */ public URL getResource(String arg0) throws MalformedURLException { return delegate.getResource(arg0); } /** * {@inheritDoc} * @see javax.servlet.ServletContext#getResourceAsStream(java.lang.String) */ public InputStream getResourceAsStream(String arg0) { return delegate.getResourceAsStream(arg0); } /** * {@inheritDoc} * @see javax.servlet.ServletContext#getResourcePaths(java.lang.String) */ 
@SuppressWarnings("unchecked") public Set getResourcePaths(String arg0) { return delegate.getResourcePaths(arg0); } /** * {@inheritDoc} * @see javax.servlet.ServletContext#getServerInfo() */ public String getServerInfo() { return delegate.getServerInfo(); } /** * {@inheritDoc} * @see javax.servlet.ServletContext#getServlet(java.lang.String) */ @Deprecated public Servlet getServlet(String arg0) throws ServletException { return delegate.getServlet(arg0); } /** * {@inheritDoc} * @see javax.servlet.ServletContext#getServletContextName() */ public String getServletContextName() { return delegate.getServletContextName(); } /** * {@inheritDoc} * @see javax.servlet.ServletContext#getServletNames() */ @Deprecated @SuppressWarnings("unchecked") public Enumeration getServletNames() { return delegate.getServletNames(); } /** * {@inheritDoc} * @see javax.servlet.ServletContext#getServlets() */ @Deprecated @SuppressWarnings("unchecked") public Enumeration getServlets() { return delegate.getServlets(); } /** * {@inheritDoc} * @see javax.servlet.ServletContext#log(java.lang.Exception, java.lang.String) */ @Deprecated public void log(Exception arg0, String arg1) { delegate.log(arg0, arg1); } /** * {@inheritDoc} * @see javax.servlet.ServletContext#log(java.lang.String, java.lang.Throwable) */ public void log(String arg0, Throwable arg1) { delegate.log(arg0, arg1); } /** * {@inheritDoc} * @see javax.servlet.ServletContext#log(java.lang.String) */ public void log(String arg0) { delegate.log(arg0); } /** * {@inheritDoc} * @see javax.servlet.ServletContext#removeAttribute(java.lang.String) */ public void removeAttribute(String arg0) { delegate.removeAttribute(arg0); } /** * {@inheritDoc} * @see javax.servlet.ServletContext#setAttribute(java.lang.String, java.lang.Object) */ public void setAttribute(String arg0, Object arg1) { delegate.setAttribute(arg0, arg1); } }
package com.goeuro.sync4j.sync;

import com.goeuro.sync4j.fs.LoosePath;
import com.goeuro.sync4j.fs.Path;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.concurrent.GuardedBy;
import java.io.UncheckedIOException;
import java.nio.channels.ClosedByInterruptException;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Optional;
import java.util.UUID;
import java.util.logging.Logger;

import static com.goeuro.sync4j.sync.Lock.LockResultType.*;
import static com.goeuro.sync4j.sync.LockFile.lockFile;
import static com.goeuro.sync4j.sync.LockFileContent.lockFileContent;
import static java.lang.Thread.currentThread;
import static java.lang.Thread.sleep;
import static java.time.Duration.*;
import static java.time.OffsetDateTime.now;
import static java.util.Optional.*;
import static java.util.Optional.of;
import static java.util.concurrent.TimeUnit.SECONDS;
import static java.util.logging.Level.FINE;
import static java.util.logging.Level.WARNING;

/**
 * Cooperative file-based lock. While held, a daemon "refresh" thread periodically rewrites
 * the lock file with a fresh timestamp; another process may take the lock over once that
 * timestamp is older than {@code timeout} (see {@link #allowsOverwrite(LockFileContent)}).
 * Instances are created via {@link #lock(Path)} and a {@link Builder}.
 */
public class Lock<T extends LoosePath> implements AutoCloseable {

  private static final Logger LOG = Logger.getLogger(Lock.class.getName());

  /**
   * Creates a builder for a lock over the given file.
   *
   * @param file the path the lock file is derived from
   * @return a new builder
   */
  @Nonnull
  public static <T extends LoosePath> Builder<T> lock(@Nonnull Path<T> file) {
    return new Builder<>(file);
  }

  // Defaults applied by Builder#build() when the caller configures nothing.
  @Nonnull
  public static final Duration DEFAULT_REFRESH_EVERY = parse("PT15S");
  @Nonnull
  public static final Duration DEFAULT_TIMEOUT = parse("PT5M");
  @Nonnull
  public static final Duration DEFAULT_RETRY_EVERY = parse("PT15S");

  @Nonnull
  private final UUID id;
  @Nonnull
  private final LockFile<T> file;
  @Nonnull
  private final Optional<String> owner;
  @Nonnull
  private final Duration refreshEvery;
  @Nonnull
  private final Duration timeout;
  @Nonnull
  private final Duration retryEvery;

  // Present exactly while the lock is held: the daemon thread keeping the lock file fresh.
  // Guarded by synchronized(this) in tryLock/unlock/refreshThread().
  @Nonnull
  private Optional<Thread> refreshThread = empty();

  protected Lock(
      @Nonnull UUID id,
      @Nonnull Path<T> file,
      @Nonnull Optional<String> owner,
      @Nonnull Duration refreshEvery,
      @Nonnull Duration timeout,
      @Nonnull Duration retryEvery
  ) {
    this.id = id;
    this.file = lockFile(file)
        .build();
    this.owner = owner;
    this.refreshEvery = refreshEvery;
    this.timeout = timeout;
    this.retryEvery = retryEvery;
  }

  /** Single acquisition attempt without waiting. */
  @Nonnull
  public LockResult tryLock() {
    return tryLock(null);
  }

  /** Acquisition with an optional overall wait budget. */
  @Nonnull
  public LockResult tryLock(@Nullable Duration timeout) {
    return tryLock(timeout, null);
  }

  /**
   * Attempts to acquire the lock, retrying every {@code retryEvery} until {@code timeout}
   * elapses (a {@code null} timeout means a single attempt). The optional {@code notifier}
   * can veto each retry. On success the lock file is written and the refresh thread started.
   *
   * @throws IllegalStateException if this instance already holds the lock
   */
  @Nonnull
  public LockResult tryLock(@Nullable Duration timeout, @Nullable RetryNotifier notifier) {
    synchronized (this) {
      refreshThread.ifPresent(ignored -> {
        throw new IllegalStateException("Already locked.");
      });
      // NOTE(review): start is a LocalDateTime but the statically imported now() below is
      // OffsetDateTime.now(); Duration.between() accepts the mix (it compares the local
      // part), but the types should probably be unified — confirm intended.
      final LocalDateTime start = LocalDateTime.now();
      LockResult result;
      try {
        result = tryAcquireLock();
      } catch (final InterruptedException ignored) {
        // Preserve the interrupt flag for the caller and report the failed acquisition.
        currentThread().interrupt();
        return new LockResult(interrupted);
      }
      while (!result.success() && timeout != null) {
        final Duration elapsed = between(start, now());
        final Duration left = timeout.minus(elapsed);
        if (left.compareTo(ZERO) <= 0) {
          // Wait budget exhausted; report the holder we last observed.
          return new LockResult(blocked, result);
        }
        if (notifier != null && !notifier.nextAcquireRetryAllowed(this, result, elapsed, timeout)) {
          return new LockResult(retryRejected, result);
        }
        // Sleep for the retry interval, capped by the remaining budget.
        final Duration sleepFor = left.compareTo(retryEvery) > 0 ? retryEvery : left;
        try {
          sleep(sleepFor.toMillis());
          result = tryAcquireLock();
        } catch (final InterruptedException ignored) {
          currentThread().interrupt();
          return new LockResult(interrupted);
        }
      }
      if (!result.success()) {
        return result;
      }
      // Acquired: write our content to the lock file and start keeping it fresh.
      refresh();
      refreshThread = of(createRefreshThread());
      return result;
    }
  }

  /**
   * One acquisition probe: reads the lock file and decides whether it is free, timed out
   * (take-over allowed) or actively held. Does not write the file itself.
   *
   * @throws InterruptedException if the underlying read was interrupted
   */
  @Nonnull
  protected LockResult tryAcquireLock() throws InterruptedException {
    try {
      return file().read()
          .map(this::allowsOverwrite)
          .orElse(new LockResult(locked));
    } catch (final UncheckedIOException e) {
      // A read aborted by interruption surfaces as ClosedByInterruptException; translate it
      // back into the checked InterruptedException the caller handles.
      if (e.getCause() instanceof ClosedByInterruptException) {
        final InterruptedException target = new InterruptedException();
        target.initCause(e.getCause());
        //noinspection ThrowInsideCatchBlockWhichIgnoresCaughtException
        throw target;
      }
      throw e;
    }
  }

  /** Releases the lock if held; a no-op otherwise. */
  public void unlock() {
    synchronized (this) {
      refreshThread.ifPresent(this::unlockBasedOn);
    }
  }

  /**
   * Decides whether an existing lock file may be overwritten: allowed only when its last
   * ping is older than {@link #timeout()}.
   */
  @Nonnull
  protected LockResult allowsOverwrite(@Nonnull LockFileContent content) {
    final Duration lastPingSince = between(content.lastPing(), now());
    if (lastPingSince.compareTo(timeout()) > 0) {
      LOG.log(FINE, () -> "Found old existing lock file '" + file() + "'(" + content + ") which is timed out. Take it over now.");
      return new LockResult(lockedByTakeover, content);
    }
    LOG.log(FINE, () -> "Found existing lock file '" + file() + "' (" + content + ") which is still active. Block lock request.");
    return new LockResult(blocked, content);
  }

  /** Starts the daemon thread that periodically rewrites the lock file. */
  @Nonnull
  protected Thread createRefreshThread() {
    final Thread result = new Thread(this::automaticRefresh, toString());
    result.setDaemon(true);
    result.start();
    return result;
  }

  /**
   * Refresh loop: sleeps, rewrites the lock file, repeats until interrupted (which is how
   * {@link #unlockBasedOn(Thread)} stops it). Write failures are logged, not fatal.
   */
  protected void automaticRefresh() {
    while (!currentThread().isInterrupted()) {
      try {
        //noinspection BusyWait
        sleep(refreshEvery().toMillis());
        refresh();
      } catch (final InterruptedException ignored) {
        currentThread().interrupt();
      } catch (final Exception e) {
        LOG.log(WARNING, e, () -> "Cannot update write lock file '" + file() + "'." + " This is only serious if another process also tries to write to the target location.");
      }
    }
  }

  /** Writes this lock's id/owner with a UTC ("Z") ping timestamp into the lock file. */
  protected void refresh() {
    file().write(lockFileContent()
        .withId(id())
        .lastPingedAt(now(ZoneId.of("Z")))
        .withOwner(owner())
        .build()
    );
  }

  @Nonnull
  public LockFile<T> file() {
    return file;
  }

  @Nonnull
  public UUID id() {
    return id;
  }

  @Nonnull
  public Duration refreshEvery() {
    return refreshEvery;
  }

  @Nonnull
  public Duration retryEvery() {
    return retryEvery;
  }

  @Nonnull
  public Duration timeout() {
    return timeout;
  }

  @Nonnull
  public Optional<String> owner() {
    return owner;
  }

  /** Present while the lock is held. */
  @Nonnull
  protected Optional<Thread> refreshThread() {
    synchronized (this) {
      return refreshThread;
    }
  }

  @Override
  public void close() {
    unlock();
  }

  /**
   * Stops the given refresh thread (interrupting until it dies), deletes the lock file and
   * clears the held-state marker. An interrupt received while waiting is re-asserted on the
   * current thread afterwards so the caller still observes it.
   */
  @GuardedBy("this")
  protected void unlockBasedOn(@Nonnull Thread refreshThread) {
    try {
      try {
        boolean wasInterrupted = currentThread().isInterrupted();
        while (refreshThread.isAlive()) {
          refreshThread.interrupt();
          try {
            refreshThread.join(SECONDS.toMillis(2));
            if (refreshThread.isAlive()) {
              LOG.warning(refreshThread + " was interrupted but is still alive. Continue waiting...");
            }
          } catch (final InterruptedException ignored) {
            // Remember the interrupt; keep waiting so the file is reliably deleted.
            wasInterrupted = true;
          }
        }
        if (wasInterrupted) {
          currentThread().interrupt();
        }
      } finally {
        file().delete();
      }
    } finally {
      this.refreshThread = empty();
    }
  }

  @Override
  public String toString() {
    return getClass().getSimpleName() + ":" + id();
  }

  /** Fluent configuration for {@link Lock}; unset values fall back to the DEFAULT_* constants. */
  public static class Builder<T extends LoosePath> {

    @Nonnull
    private final Path<T> file;
    @Nonnull
    private Optional<UUID> id = empty();
    @Nonnull
    private Optional<String> owner = empty();
    @Nonnull
    private Optional<Duration> refreshEvery = empty();
    @Nonnull
    private Optional<Duration> timeout = empty();
    @Nonnull
    private Optional<Duration> retryEvery = empty();

    protected Builder(@Nonnull Path<T> file) {
      this.file = file;
    }

    @Nonnull
    public Builder<T> withId(@Nullable UUID id) {
      this.id = ofNullable(id);
      return this;
    }

    @Nonnull
    public Builder<T> withOwner(@Nullable String owner) {
      this.owner = ofNullable(owner);
      return this;
    }

    @Nonnull
    public Builder<T> whichRefreshesEvery(@Nullable Duration refreshEvery) {
      this.refreshEvery = ofNullable(refreshEvery);
      return this;
    }

    @Nonnull
    public Builder<T> withTimeoutAfter(@Nullable Duration timeout) {
      this.timeout = ofNullable(timeout);
      return this;
    }

    @Nonnull
    public Builder<T> withRetryEvery(@Nullable Duration retryEvery) {
      this.retryEvery = ofNullable(retryEvery);
      return this;
    }

    @Nonnull
    public Lock<T> build() {
      return new Lock<>(
          id.orElseGet(UUID::randomUUID),
          file,
          owner,
          refreshEvery.orElse(DEFAULT_REFRESH_EVERY),
          timeout.orElse(DEFAULT_TIMEOUT),
          retryEvery.orElse(DEFAULT_RETRY_EVERY)
      );
    }
  }

  /**
   * Outcome of an acquisition attempt: the result type plus, when another holder was seen,
   * that holder's lock-file content.
   */
  public static class LockResult {

    @Nonnull
    private final LockResultType type;
    @Nonnull
    private final Optional<LockFileContent> otherContent;

    protected LockResult(
        @Nonnull LockResultType type,
        @Nullable LockFileContent otherContent
    ) {
      this.type = type;
      this.otherContent = ofNullable(otherContent);
    }

    protected LockResult(
        @Nonnull LockResultType type
    ) {
      this(type, (LockFileContent) null);
    }

    // Carries over the other holder's content from a previous probe result.
    protected LockResult(
        @Nonnull LockResultType type,
        @Nullable LockResult previous
    ) {
      this(type, previous != null ? previous.otherContent().orElse(null) : null);
    }

    @Nonnull
    public LockResultType type() {
      return type;
    }

    @Nonnull
    public Optional<LockFileContent> otherContent() {
      return otherContent;
    }

    public boolean success() {
      return type().success();
    }

    @Override
    public String toString() {
      final StringBuilder sb = new StringBuilder();
      sb.append(type());
      otherContent().ifPresent(content -> sb.append(", other: ").append(content));
      return sb.toString();
    }
  }

  /** Result categories; only {@code locked} and {@code lockedByTakeover} count as success. */
  public enum LockResultType {
    locked(true),
    lockedByTakeover(true),
    blocked(false),
    interrupted(false),
    retryRejected(false);

    private final boolean success;

    LockResultType(boolean success) {
      this.success = success;
    }

    public boolean success() {
      return success;
    }
  }

  /** Callback consulted before every acquisition retry; returning {@code false} aborts. */
  @FunctionalInterface
  public interface RetryNotifier {
    boolean nextAcquireRetryAllowed(@Nonnull Lock<? extends LoosePath> lock, @Nonnull LockResult lockResult, @Nonnull Duration elapsed, @Nonnull Duration timeout);
  }
}
/* * Copyright 2015 LG CNS. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package scouter.client.group.view; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import org.csstudio.swt.xygraph.dataprovider.CircularBufferDataProvider; import org.csstudio.swt.xygraph.dataprovider.IDataProvider; import org.csstudio.swt.xygraph.dataprovider.Sample; import org.csstudio.swt.xygraph.figures.Trace; import org.csstudio.swt.xygraph.figures.Trace.PointStyle; import org.csstudio.swt.xygraph.figures.Trace.TraceType; import org.csstudio.swt.xygraph.figures.XYGraph; import org.eclipse.draw2d.FigureCanvas; import org.eclipse.jface.action.Action; import org.eclipse.jface.action.IToolBarManager; import org.eclipse.jface.action.Separator; import org.eclipse.jface.window.DefaultToolTip; import org.eclipse.swt.SWT; import org.eclipse.swt.events.ControlEvent; import org.eclipse.swt.events.ControlListener; import org.eclipse.swt.events.KeyEvent; import org.eclipse.swt.events.KeyListener; import org.eclipse.swt.events.MouseEvent; import org.eclipse.swt.events.MouseListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.graphics.Font; import org.eclipse.swt.graphics.GC; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.graphics.ImageData; import org.eclipse.swt.graphics.PaletteData; import 
org.eclipse.swt.graphics.Point; import org.eclipse.swt.graphics.RGB; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Combo; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.ui.IViewSite; import org.eclipse.ui.IWorkbenchPage; import org.eclipse.ui.IWorkbenchWindow; import org.eclipse.ui.PartInitException; import org.eclipse.ui.PlatformUI; import scouter.client.Images; import scouter.client.group.DatePeriodUnit; import scouter.client.group.GroupManager; import scouter.client.model.AgentColorManager; import scouter.client.model.AgentModelThread; import scouter.client.model.AgentObject; import scouter.client.model.TextProxy; import scouter.client.net.INetReader; import scouter.client.net.TcpProxy; import scouter.client.popup.DualCalendarDialog; import scouter.client.preferences.PManager; import scouter.client.preferences.PreferenceConstants; import scouter.client.server.Server; import scouter.client.server.ServerManager; import scouter.client.threads.ObjectSelectManager; import scouter.client.threads.ObjectSelectManager.IObjectCheckListener; import scouter.client.util.ChartUtil; import scouter.client.util.ColorUtil; import scouter.client.util.CounterUtil; import scouter.client.util.ExUtil; import scouter.client.util.ImageUtil; import scouter.client.util.ScouterUtil; import scouter.client.util.TimeUtil; import scouter.client.util.UIUtil; import scouter.client.views.ScouterViewPart; import scouter.lang.pack.MapPack; import scouter.lang.pack.Pack; import scouter.lang.value.ListValue; import scouter.io.DataInputX; import scouter.net.RequestCmd; import scouter.util.DateUtil; import scouter.util.FormatUtil; import scouter.util.HashUtil; import scouter.util.StringUtil; public class 
CounterPastDateGroupAllView extends ScouterViewPart
    implements DualCalendarDialog.ILoadDualCounterDialog, IObjectCheckListener {

  public static final String ID = CounterPastDateGroupAllView.class.getName();

  // Parsed from the view's secondary id in init(): "group&objType&counter[&...]".
  private String grpName;
  private String objType;
  private String counter;
  // Default date range: yesterday, for both start and end (yyyymmdd strings).
  private String sDate = DateUtil.yyyymmdd(TimeUtil.getCurrentTime() - DateUtil.MILLIS_PER_DAY);
  private String eDate = DateUtil.yyyymmdd(TimeUtil.getCurrentTime() - DateUtil.MILLIS_PER_DAY);
  private Server defaultServer = ServerManager.getInstance().getDefaultServer();
  protected XYGraph xyGraph;
  IWorkbenchWindow window = PlatformUI.getWorkbench().getActiveWorkbenchWindow();
  // Traces currently plotted, one per agent object.
  protected List<Trace> traces = new ArrayList<Trace>();
  // serverId -> list of object hashes on that server (rebuilt by collectObj()).
  private Map<Integer, ListValue> serverObjMap = new HashMap<Integer, ListValue>();
  protected FigureCanvas canvas;

  /**
   * Parses group name, object type and counter out of the secondary view id.
   */
  public void init(IViewSite site) throws PartInitException {
    super.init(site);
    String secId = site.getSecondaryId();
    String[] datas = secId.split("&");
    grpName = datas[0];
    objType = datas[1];
    counter = datas[2];
  }

  /**
   * Builds the whole view: header menu, chart canvas with listeners, toolbar actions, and
   * kicks off the first load.
   */
  public void createPartControl(Composite parent) {
    String displayCounter = defaultServer.getCounterEngine().getCounterDisplayName(objType, counter);
    setPartName(grpName + " - " + displayCounter);
    setTitleImage(Images.getCounterImage(objType, counter, defaultServer.getId()));
    String unit = defaultServer.getCounterEngine().getCounterUnit(objType, counter);
    statusMessage = grpName + " | (PastDate) All " + displayCounter
        + (StringUtil.isNotEmpty(unit) ? "(" + unit + ")" : "");
    Composite composite = new Composite(parent, SWT.NONE);
    GridLayout gLayout = new GridLayout(1, true);
    gLayout.horizontalSpacing = 0;
    gLayout.marginHeight = 0;
    gLayout.marginWidth = 0;
    composite.setLayout(gLayout);
    createUpperMenu(composite);
    Composite chartComposite = new Composite(composite, SWT.NONE);
    chartComposite.setLayoutData(new GridData(GridData.FILL, GridData.FILL, true, true));
    chartComposite.setLayout(UIUtil.formLayout(0, 0));
    // NOTE(review): the setLayout call below is an exact duplicate of the line above and
    // looks like a copy/paste slip — harmless, but one of the two can be removed.
    chartComposite.setLayout(UIUtil.formLayout(0, 0));
    chartComposite.setBackground(ColorUtil.getInstance().getColor(SWT.COLOR_WHITE));
    canvas = new FigureCanvas(chartComposite);
    canvas.setScrollBarVisibility(FigureCanvas.NEVER);
    canvas.setBackground(ColorUtil.getInstance().getColor(SWT.COLOR_WHITE));
    canvas.setLayoutData(UIUtil.formData(0, 0, 0, 0, 100, 0, 100, 0));
    canvas.addControlListener(new ControlListener() {
      // Re-entrancy guard: resizing the graph can trigger another resize event.
      boolean lock = false;

      public void controlResized(ControlEvent e) {
        org.eclipse.swt.graphics.Rectangle r = canvas.getClientArea();
        if (!lock) {
          lock = true;
          // Show the legend only when the canvas is large enough for it.
          if (ChartUtil.isShowLegendAllowSize(r.width, r.height)) {
            xyGraph.setShowLegend(true);
          } else {
            xyGraph.setShowLegend(false);
          }
          r = canvas.getClientArea();
          xyGraph.setSize(r.width, r.height);
          lock = false;
        }
      }

      public void controlMoved(ControlEvent e) {
      }
    });
    canvas.addKeyListener(new KeyListener() {
      public void keyReleased(KeyEvent e) {
      }

      public void keyPressed(KeyEvent e) {
        // F5 reloads the chart.
        if (e.keyCode == SWT.F5) {
          forceRefresh();
        }
      }
    });
    final DefaultToolTip toolTip = new DefaultToolTip(canvas, DefaultToolTip.RECREATE, true);
    // NOTE(review): this Font is never disposed; SWT fonts are OS resources and should be
    // disposed with the view — confirm and fix separately.
    toolTip.setFont(new Font(null, "Arial", 10, SWT.BOLD));
    toolTip.setBackgroundColor(Display.getCurrent().getSystemColor(SWT.COLOR_INFO_BACKGROUND));
    canvas.addMouseListener(new MouseListener() {
      public void mouseUp(MouseEvent e) {
        onDeselectObject();
        toolTip.hide();
      }

      public void mouseDown(MouseEvent e) {
        double x = xyGraph.primaryXAxis.getPositionValue(e.x, false);
        double y = xyGraph.primaryYAxis.getPositionValue(e.y, false);
        if (x < 0 || y < 0) {
          return;
        }
        // Hit-test by sampling a 1x10 pixel column around the cursor and matching the
        // pixel color against the per-agent trace colors.
        Image image = new Image(e.display, 1, 10);
        GC gc = new GC((FigureCanvas)e.widget);
        gc.copyArea(image, e.x, e.y > 5 ? e.y - 5 : 0);
        ImageData imageData = image.getImageData();
        PaletteData palette = imageData.palette;
        RGB white = new RGB(255, 255, 255);
        int point = 5;
        int offset = 0;
        // Scan outward from the middle pixel: 5, 6, 4, 7, 3, ... until a non-white
        // (i.e. trace-colored) pixel is found or the column is exhausted.
        while (point >= 0 && point < 10) {
          int pixelValue = imageData.getPixel(0, point);
          RGB rgb = palette.getRGB(pixelValue);
          if (white.equals(rgb) == false) {
            int objHash = AgentColorManager.getInstance().getObjectHash(rgb);
            if (objHash != 0) {
              String objName = TextProxy.object.getText(objHash);
              double time = xyGraph.primaryXAxis.getPositionValue(e.x, false);
              double v = 0.0d;
              for (Trace t : traces) {
                if (t.getName().equals(objName)) {
                  v = ScouterUtil.getNearestValue(t.getDataProvider(), time);
                  String value = FormatUtil.print(v, "#,###.##");
                  toolTip.setText(objName + "\nvalue : " + value);
                  toolTip.show(new Point(e.x, e.y));
                  onSelectObject(objHash, objName, objType);
                  break;
                }
              }
              break;
            }
          }
          offset = offset >= 0 ? offset + 1 : offset - 1;
          offset *= -1;
          point += offset;
        }
        gc.dispose();
        image.dispose();
      }

      public void mouseDoubleClick(MouseEvent e) {}
    });
    xyGraph = new XYGraph();
    xyGraph.setShowTitle(false);
    canvas.setContents(xyGraph);
    xyGraph.primaryXAxis.setDateEnabled(true);
    xyGraph.primaryXAxis.setShowMajorGrid(true);
    xyGraph.primaryYAxis.setAutoScale(true);
    xyGraph.primaryYAxis.setShowMajorGrid(true);
    xyGraph.primaryXAxis.setTitle("");
    xyGraph.primaryYAxis.setTitle("");
    IToolBarManager man = getViewSite().getActionBars().getToolBarManager();
    man.add(new Action("Reload", ImageUtil.getImageDescriptor(Images.refresh)) {
      public void run() {
        forceRefresh();
      }
    });
    man.add(new Separator());
    // "Duplicate" opens another instance of this view with a unique secondary id.
    man.add(new Action("Duplicate", ImageUtil.getImageDescriptor(Images.copy)) {
      public void run() {
        ExUtil.exec(new Runnable() {
          public void run() {
            try {
              window.getActivePage().showView(
                  CounterPastDateGroupAllView.ID,
                  grpName + "&" + objType + "&" + counter + "&" + TimeUtil.getCurrentTime(),
                  IWorkbenchPage.VIEW_ACTIVATE);
            } catch (PartInitException e) {
              e.printStackTrace();
            }
          }
        });
      }
    });
    ObjectSelectManager.getInstance().addObjectCheckStateListener(this);
    forceRefresh();
  }

  // Header widgets, kept as fields so setHeadText()/dialogs can reach them.
  Label serverText, sDateText, eDateText;
  DualCalendarDialog calDialog;
  Combo periodCombo;
  Composite headerComp;
  Button applyBtn;

  /**
   * Builds the header row: Apply button, manual calendar picker, period combo and the
   * "start ~ end" date labels (laid out right-to-left via form data).
   */
  private void createUpperMenu(Composite composite) {
    headerComp = new Composite(composite, SWT.NONE);
    headerComp.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false));
    headerComp.setLayout(UIUtil.formLayout(0, 0));
    applyBtn = new Button(headerComp, SWT.PUSH);
    applyBtn.setLayoutData(UIUtil.formData(null, -1, 0, 2, 100, -5, null, -1));
    applyBtn.setText("Apply");
    applyBtn.addListener(SWT.Selection, new Listener() {
      public void handleEvent(Event event) {
        switch (event.type) {
          case SWT.Selection:
            forceRefresh();
            break;
        }
      }
    });
    Button manualBtn = new Button(headerComp, SWT.PUSH);
    manualBtn.setImage(Images.CTXMENU_RDC);
    manualBtn.setText("Manual");
    manualBtn.setLayoutData(UIUtil.formData(null, -1, 0, 2, applyBtn, -5, null, -1));
    manualBtn.addListener(SWT.Selection, new Listener() {
      public void handleEvent(Event event) {
        switch (event.type) {
          case SWT.Selection:
            Display display = Display.getCurrent();
            if (display == null) {
              display = Display.getDefault();
            }
            calDialog = new DualCalendarDialog(display, CounterPastDateGroupAllView.this);
            calDialog.show(UIUtil.getMousePosition());
            break;
        }
      }
    });
    periodCombo = new Combo(headerComp, SWT.VERTICAL | SWT.BORDER | SWT.READ_ONLY);
    periodCombo.setLayoutData(UIUtil.formData(null, -1, 0, 3, manualBtn, -5, null, -1));
    DatePeriodUnit[] periodArray = DatePeriodUnit.values();
    int index = 0;
    for (; index < periodArray.length; index++) {
      periodCombo.add(periodArray[index].getLabel(), index);
      periodCombo.setData(periodArray[index].getLabel(), periodArray[index].getTime());
    }
    // Pre-select the last (longest) period.
    periodCombo.select(index - 1);
    periodCombo.addSelectionListener(new SelectionAdapter() {
      public void widgetSelected(SelectionEvent e) {
        // Derive the start date from the end date minus the chosen period (inclusive).
        long time = (Long) periodCombo.getData(periodCombo.getText());
        long etime = DateUtil.yyyymmdd(eDate);
        sDate = DateUtil.yyyymmdd(etime - (time - DateUtil.MILLIS_PER_DAY));
        setHeadText();
      }
    });
    eDateText = new Label(headerComp, SWT.NONE);
    eDateText.setLayoutData(UIUtil.formData(null, -1, 0, 7, periodCombo, -5, null, -1));
    Label windbarLabel = new Label(headerComp, SWT.NONE);
    windbarLabel.setLayoutData(UIUtil.formData(null, -1, 0, 7, eDateText, -5, null, -1));
    windbarLabel.setText("~");
    sDateText = new Label(headerComp, SWT.NONE);
    sDateText.setLayoutData(UIUtil.formData(null, -1, 0, 7, windbarLabel, -5, null, -1));
    serverText = new Label(headerComp, SWT.NONE | SWT.RIGHT);
    serverText.setLayoutData(UIUtil.formData(0, 0, 0, 7, sDateText, -5, null, -1));
    setHeadText();
  }

  /** Renders the group name and the yyyymmdd dates as "yyyy-MM-dd" into the header labels. */
  private void setHeadText() {
    serverText.setText(grpName + " |");
    sDateText.setText(sDate.substring(0, 4) + "-" + sDate.substring(4, 6) + "-" + sDate.substring(6, 8));
    eDateText.setText(eDate.substring(0, 4) + "-" + eDate.substring(4, 6) + "-" + eDate.substring(6, 8));
  }

  // Single-date callback of ILoadDualCounterDialog — unused by this dual-date view.
  public void onPressedOk(String date) {}

  public void onPressedCancel() {}

  public void setFocus() {
    super.setFocus();
  }

  // NOTE(review): does not call super.dispose(); other ScouterViewPart subclasses may rely
  // on it — confirm whether the base class needs the call.
  public void dispose() {
    ObjectSelectManager.getInstance().removeObjectCheckStateListener(this);
  }

  /**
   * Scans every sample of every trace and returns the chart-friendly maximum
   * (rounded up via ChartUtil.getMaxValue).
   */
  private double getMaxValue() {
    double max = 0.0;
    for (Trace trace : traces) {
      CircularBufferDataProvider data = (CircularBufferDataProvider) trace.getDataProvider();
      if (data != null) {
        for (int inx = 0; inx < data.getSize(); inx++) {
          Sample sample = (Sample) data.getSample(inx);
          double y = sample.getYValue();
          if (y > max) {
            max = y;
          }
        }
      }
    }
    return ChartUtil.getMaxValue(max);
  }

  /**
   * Creates a trace (colored per agent) for the given object hash, registers it on the
   * graph, and returns its backing circular-buffer provider.
   *
   * @param objHash agent object hash
   * @param size buffer capacity in samples
   */
  private IDataProvider getDataProvider(int objHash, int size) {
    CircularBufferDataProvider provider = new CircularBufferDataProvider(true);
    provider.setBufferSize(size);
    String name = StringUtil.trimToEmpty(TextProxy.object.getText(objHash));
    Trace trace = new Trace(name, xyGraph.primaryXAxis, xyGraph.primaryYAxis, provider);
    trace.setPointStyle(PointStyle.NONE);
    trace.getXAxis().setFormatPattern("yyyy-MM-dd\n HH:mm:ss");
    trace.getYAxis().setFormatPattern("#,##0");
    trace.setLineWidth(PManager.getInstance().getInt(PreferenceConstants.P_CHART_LINE_WIDTH));
    trace.setTraceType(TraceType.SOLID_LINE);
    trace.setTraceColor(AgentColorManager.getInstance().assignColor(objType, objHash));
    xyGraph.addTrace(trace);
    traces.add(trace);
    return provider;
  }

  /** Clears all traces and reloads the data on a background thread. */
  private void forceRefresh() {
    for (Trace trace : traces) {
      xyGraph.removeTrace(trace);
    }
    traces.clear();
    ExUtil.asyncRun(new Runnable() {
      public void run() {
        load();
      }
    });
  }

  private GroupManager manager = GroupManager.getInstance();

  /**
   * Rebuilds serverObjMap with the live agents of this group, bucketed by server id.
   * Dead or unknown agents are skipped.
   */
  private void collectObj() {
    serverObjMap.clear();
    Set<Integer> objHashs = manager.getObjectsByGroup(grpName);
    for (int objHash : objHashs) {
      AgentObject agentObj = AgentModelThread.getInstance().getAgentObject(objHash);
      if (agentObj == null || agentObj.isAlive() == false) {
        continue;
      }
      int serverId = agentObj.getServerId();
      ListValue lv = serverObjMap.get(serverId);
      if (lv == null) {
        lv = new ListValue();
        serverObjMap.put(serverId, lv);
      }
      lv.add(objHash);
    }
  }

  /**
   * Fetches past counter data per server over TCP (runs on a background thread via
   * forceRefresh), then plots the collected packs on the UI thread.
   */
  private void load() {
    final long stime = DateUtil.yyyymmdd(sDate);
    // End of the last day, inclusive.
    final long etime = DateUtil.yyyymmdd(eDate) + (DateUtil.MILLIS_PER_DAY - 1);
    collectObj();
    Iterator<Integer> serverIds = serverObjMap.keySet().iterator();
    final List<Pack> result = new ArrayList<Pack>();
    while (serverIds.hasNext()) {
      int serverId = serverIds.next();
      TcpProxy tcp = TcpProxy.getTcpProxy(serverId);
      try {
        MapPack param = new MapPack();
        param.put("stime", stime);
        param.put("etime", etime);
        param.put("counter", counter);
        param.put("objHash", serverObjMap.get(serverId));
        tcp.process(RequestCmd.COUNTER_PAST_LONGDATE_GROUP, param, new INetReader() {
          public void process(DataInputX in) throws IOException {
            Pack p = in.readPack();
            result.add(p);
          }
        });
      } catch (Exception e) {
        e.printStackTrace();
      } finally {
        // Always return the proxy to the pool.
        TcpProxy.putTcpProxy(tcp);
      }
    }
    // Plotting must happen on the SWT UI thread.
    ExUtil.exec(canvas, new Runnable() {
      public void run() {
        xyGraph.primaryXAxis.setRange(stime, etime);
        for (Pack pack : result) {
          if ((pack instanceof MapPack) == false) {
            continue;
          }
          MapPack m = (MapPack) pack;
          int objHash = m.getInt("objHash");
          ListValue time = m.getList("time");
          ListValue value = m.getList("value");
          if (time == null || time.size() < 1) {
            continue;
          }
          // Buffer sized for one sample per five minutes across the range.
          CircularBufferDataProvider provider = (CircularBufferDataProvider) getDataProvider(objHash,
              (int) ((etime - stime) / DateUtil.MILLIS_PER_FIVE_MINUTE));
          provider.clearTrace();
          for (int i = 0; i < time.size(); i++) {
            long x = time.getLong(i);
            double y = value.getDouble(i);
            provider.addSample(new Sample(x, y));
          }
        }
        // Percent counters get a fixed 0-100 scale; others scale to the observed max.
        if (CounterUtil.isPercentValue(objType, counter)) {
          xyGraph.primaryYAxis.setRange(0, 100);
        } else {
          double max = getMaxValue();
          xyGraph.primaryYAxis.setRange(0, max);
        }
      }
    });
  }

  /** Highlights the named trace (thicker line) when an object of this view's type is picked. */
  public void onSelectObject(int objHash, final String objName, String objType) {
    if (objType.equals(this.objType) == false) {
      return;
    }
    ExUtil.exec(canvas, new Runnable() {
      public void run() {
        int width = PManager.getInstance().getInt(PreferenceConstants.P_CHART_LINE_WIDTH);
        for (Trace t : traces) {
          if (t.getName().equals(objName)) {
            t.setLineWidth(width + 2);
            break;
          }
        }
      }
    });
  }

  /** Restores the default line width on every trace. */
  public void onDeselectObject() {
    ExUtil.exec(canvas, new Runnable() {
      public void run() {
        int width = PManager.getInstance().getInt(PreferenceConstants.P_CHART_LINE_WIDTH);
        for (Trace t : traces) {
          t.setLineWidth(width);
        }
      }
    });
  }

  /**
   * Dual-calendar callback: applies the chosen date range to the header. The chart itself
   * reloads only when Apply is pressed.
   */
  public void onPressedOk(String sDate, String eDate) {
    this.sDate = sDate;
    this.eDate = eDate;
    setHeadText();
  }

  public void onPressedOk(long startTime, long endTime) {
  }

  public void redraw() {
    if (canvas != null && canvas.isDisposed() == false) {
      canvas.redraw();
      xyGraph.repaint();
    }
  }

  /** Applies the global object check/uncheck state to trace visibility, then repaints. */
  public void notifyChangeState() {
    ExUtil.asyncRun(new Runnable() {
      public void run() {
        for (Trace t : traces) {
          String objName = t.getName();
          if (ObjectSelectManager.getInstance().isUnselectedObject(HashUtil.hash(objName))) {
            t.setVisible(false);
          } else {
            t.setVisible(true);
          }
        }
        ExUtil.exec(canvas, new Runnable() {
          public void run() {
            redraw();
          }
        });
      }
    });
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.execution;

import com.facebook.presto.Session;
import com.facebook.presto.Session.SessionBuilder;
import com.facebook.presto.metadata.MetadataManager;
import com.facebook.presto.security.AllowAllAccessControl;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.transaction.IsolationLevel;
import com.facebook.presto.sql.analyzer.SemanticException;
import com.facebook.presto.sql.tree.Isolation;
import com.facebook.presto.sql.tree.StartTransaction;
import com.facebook.presto.sql.tree.TransactionAccessMode;
import com.facebook.presto.transaction.TransactionId;
import com.facebook.presto.transaction.TransactionInfo;
import com.facebook.presto.transaction.TransactionManager;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.Test;

import java.net.URI;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ExecutorService;

import static com.facebook.presto.spi.StandardErrorCode.INCOMPATIBLE_CLIENT;
import static com.facebook.presto.spi.StandardErrorCode.NOT_SUPPORTED;
import static com.facebook.presto.sql.analyzer.SemanticErrorCode.INVALID_TRANSACTION_MODE;
import static com.facebook.presto.testing.TestingSession.testSessionBuilder;
import static com.facebook.presto.tpch.TpchMetadata.TINY_SCHEMA_NAME;
import static com.facebook.presto.transaction.TransactionManager.createTestTransactionManager;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static java.util.concurrent.Executors.newCachedThreadPool;

/**
 * Tests for {@link StartTransactionTask}: verifies client-capability checks,
 * nested-transaction rejection, successful transaction creation, explicit
 * isolation/access-mode handling, and rejection of duplicate transaction modes.
 */
public class TestStartTransactionTask
{
    private final MetadataManager metadata = MetadataManager.createTestMetadataManager();
    private final ExecutorService executor = newCachedThreadPool(daemonThreadsNamed("stage-executor-%s"));

    @AfterClass(alwaysRun = true)
    public void tearDown()
            throws Exception
    {
        executor.shutdownNow();
    }

    @Test
    public void testNonTransactionalClient()
            throws Exception
    {
        // Session without setClientTransactionSupport(): START TRANSACTION must be refused.
        Session session = sessionBuilder().build();
        TransactionManager transactionManager = createTestTransactionManager();
        QueryStateMachine stateMachine = QueryStateMachine.begin(new QueryId("query"), "START TRANSACTION", session, URI.create("fake://uri"), true, transactionManager, executor);
        Assert.assertFalse(stateMachine.getSession().getTransactionId().isPresent());

        PrestoException e = expectCause(PrestoException.class, () ->
                new StartTransactionTask().execute(new StartTransaction(ImmutableList.of()), transactionManager, metadata, new AllowAllAccessControl(), stateMachine).join());
        Assert.assertEquals(e.getErrorCode(), INCOMPATIBLE_CLIENT.toErrorCode());

        Assert.assertTrue(transactionManager.getAllTransactionInfos().isEmpty());
        Assert.assertFalse(stateMachine.getQueryInfoWithoutDetails().isClearTransactionId());
        Assert.assertFalse(stateMachine.getQueryInfoWithoutDetails().getStartedTransactionId().isPresent());
    }

    @Test
    public void testNestedTransaction()
            throws Exception
    {
        // Session already carries a transaction id: starting another must be refused.
        TransactionManager transactionManager = createTestTransactionManager();
        Session session = sessionBuilder()
                .setTransactionId(TransactionId.create())
                .setClientTransactionSupport()
                .build();
        QueryStateMachine stateMachine = QueryStateMachine.begin(new QueryId("query"), "START TRANSACTION", session, URI.create("fake://uri"), true, transactionManager, executor);

        PrestoException e = expectCause(PrestoException.class, () ->
                new StartTransactionTask().execute(new StartTransaction(ImmutableList.of()), transactionManager, metadata, new AllowAllAccessControl(), stateMachine).join());
        Assert.assertEquals(e.getErrorCode(), NOT_SUPPORTED.toErrorCode());

        Assert.assertTrue(transactionManager.getAllTransactionInfos().isEmpty());
        Assert.assertFalse(stateMachine.getQueryInfoWithoutDetails().isClearTransactionId());
        Assert.assertFalse(stateMachine.getQueryInfoWithoutDetails().getStartedTransactionId().isPresent());
    }

    @Test
    public void testStartTransaction()
            throws Exception
    {
        Session session = sessionBuilder()
                .setClientTransactionSupport()
                .build();
        TransactionManager transactionManager = createTestTransactionManager();
        QueryStateMachine stateMachine = QueryStateMachine.begin(new QueryId("query"), "START TRANSACTION", session, URI.create("fake://uri"), true, transactionManager, executor);
        Assert.assertFalse(stateMachine.getSession().getTransactionId().isPresent());

        new StartTransactionTask().execute(new StartTransaction(ImmutableList.of()), transactionManager, metadata, new AllowAllAccessControl(), stateMachine).join();

        Assert.assertFalse(stateMachine.getQueryInfoWithoutDetails().isClearTransactionId());
        Assert.assertTrue(stateMachine.getQueryInfoWithoutDetails().getStartedTransactionId().isPresent());
        Assert.assertEquals(transactionManager.getAllTransactionInfos().size(), 1);
        TransactionInfo transactionInfo = transactionManager.getTransactionInfo(stateMachine.getQueryInfoWithoutDetails().getStartedTransactionId().get());
        Assert.assertFalse(transactionInfo.isAutoCommitContext());
    }

    @Test
    public void testStartTransactionExplicitModes()
            throws Exception
    {
        Session session = sessionBuilder()
                .setClientTransactionSupport()
                .build();
        TransactionManager transactionManager = createTestTransactionManager();
        QueryStateMachine stateMachine = QueryStateMachine.begin(new QueryId("query"), "START TRANSACTION", session, URI.create("fake://uri"), true, transactionManager, executor);
        Assert.assertFalse(stateMachine.getSession().getTransactionId().isPresent());

        new StartTransactionTask().execute(
                new StartTransaction(ImmutableList.of(new Isolation(Isolation.Level.SERIALIZABLE), new TransactionAccessMode(true))),
                transactionManager, metadata, new AllowAllAccessControl(), stateMachine).join();

        Assert.assertFalse(stateMachine.getQueryInfoWithoutDetails().isClearTransactionId());
        Assert.assertTrue(stateMachine.getQueryInfoWithoutDetails().getStartedTransactionId().isPresent());
        Assert.assertEquals(transactionManager.getAllTransactionInfos().size(), 1);
        TransactionInfo transactionInfo = transactionManager.getTransactionInfo(stateMachine.getQueryInfoWithoutDetails().getStartedTransactionId().get());
        Assert.assertEquals(transactionInfo.getIsolationLevel(), IsolationLevel.SERIALIZABLE);
        Assert.assertTrue(transactionInfo.isReadOnly());
        Assert.assertFalse(transactionInfo.isAutoCommitContext());
    }

    @Test
    public void testStartTransactionTooManyIsolationLevels()
            throws Exception
    {
        Session session = sessionBuilder()
                .setClientTransactionSupport()
                .build();
        TransactionManager transactionManager = createTestTransactionManager();
        QueryStateMachine stateMachine = QueryStateMachine.begin(new QueryId("query"), "START TRANSACTION", session, URI.create("fake://uri"), true, transactionManager, executor);
        Assert.assertFalse(stateMachine.getSession().getTransactionId().isPresent());

        SemanticException e = expectCause(SemanticException.class, () ->
                new StartTransactionTask().execute(
                        new StartTransaction(ImmutableList.of(new Isolation(Isolation.Level.READ_COMMITTED), new Isolation(Isolation.Level.READ_COMMITTED))),
                        transactionManager, metadata, new AllowAllAccessControl(), stateMachine).join());
        Assert.assertEquals(e.getCode(), INVALID_TRANSACTION_MODE);

        Assert.assertTrue(transactionManager.getAllTransactionInfos().isEmpty());
        Assert.assertFalse(stateMachine.getQueryInfoWithoutDetails().isClearTransactionId());
        Assert.assertFalse(stateMachine.getQueryInfoWithoutDetails().getStartedTransactionId().isPresent());
    }

    @Test
    public void testStartTransactionTooManyAccessModes()
            throws Exception
    {
        Session session = sessionBuilder()
                .setClientTransactionSupport()
                .build();
        TransactionManager transactionManager = createTestTransactionManager();
        QueryStateMachine stateMachine = QueryStateMachine.begin(new QueryId("query"), "START TRANSACTION", session, URI.create("fake://uri"), true, transactionManager, executor);
        Assert.assertFalse(stateMachine.getSession().getTransactionId().isPresent());

        SemanticException e = expectCause(SemanticException.class, () ->
                new StartTransactionTask().execute(
                        new StartTransaction(ImmutableList.of(new TransactionAccessMode(true), new TransactionAccessMode(true))),
                        transactionManager, metadata, new AllowAllAccessControl(), stateMachine).join());
        Assert.assertEquals(e.getCode(), INVALID_TRANSACTION_MODE);

        Assert.assertTrue(transactionManager.getAllTransactionInfos().isEmpty());
        Assert.assertFalse(stateMachine.getQueryInfoWithoutDetails().isClearTransactionId());
        Assert.assertFalse(stateMachine.getQueryInfoWithoutDetails().getStartedTransactionId().isPresent());
    }

    /**
     * Runs {@code task}, which is expected to fail with a {@link CompletionException}
     * wrapping a cause of {@code expectedType}, and returns the unwrapped cause.
     * Replaces the try/{@code Assert.fail()}/propagate scaffolding that was duplicated
     * verbatim in every negative test above; any unexpected outcome (no exception, or a
     * cause of a different type) fails the test exactly as the old code did.
     */
    private static <T extends Exception> T expectCause(Class<T> expectedType, Runnable task)
    {
        try {
            task.run();
        }
        catch (CompletionException e) {
            Throwable cause = e.getCause();
            if (expectedType.isInstance(cause)) {
                return expectedType.cast(cause);
            }
            // Unexpected cause type: rethrow so the test errors out, as before.
            throw Throwables.propagate(cause);
        }
        Assert.fail("expected " + expectedType.getSimpleName() + " to be thrown");
        throw new AssertionError("unreachable");
    }

    private static SessionBuilder sessionBuilder()
    {
        return testSessionBuilder()
                .setCatalog("tpch")
                .setSchema(TINY_SCHEMA_NAME);
    }
}
/*--

 $Id: ProcessingInstruction.java,v 1.47 2007/11/10 05:28:59 jhunter Exp $

 Copyright (C) 2000-2007 Jason Hunter & Brett McLaughlin.
 All rights reserved.

 Redistribution and use in source and binary forms, with or without
 modification, are permitted provided that the following conditions
 are met:

 1. Redistributions of source code must retain the above copyright
    notice, this list of conditions, and the following disclaimer.

 2. Redistributions in binary form must reproduce the above copyright
    notice, this list of conditions, and the disclaimer that follows
    these conditions in the documentation and/or other materials
    provided with the distribution.

 3. The name "JDOM" must not be used to endorse or promote products
    derived from this software without prior written permission.  For
    written permission, please contact <request_AT_jdom_DOT_org>.

 4. Products derived from this software may not be called "JDOM", nor
    may "JDOM" appear in their name, without prior written permission
    from the JDOM Project Management <request_AT_jdom_DOT_org>.

 In addition, we request (but do not require) that you include in the
 end-user documentation provided with the redistribution and/or in the
 software itself an acknowledgement equivalent to the following:
     "This product includes software developed by the
      JDOM Project (http://www.jdom.org/)."
 Alternatively, the acknowledgment may be graphical using the logos
 available at http://www.jdom.org/images/logos.

 THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
 WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 DISCLAIMED.  IN NO EVENT SHALL THE JDOM AUTHORS OR THE PROJECT
 CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
 USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 SUCH DAMAGE.

 This software consists of voluntary contributions made by many
 individuals on behalf of the JDOM Project and was originally
 created by Jason Hunter <jhunter_AT_jdom_DOT_org> and
 Brett McLaughlin <brett_AT_jdom_DOT_org>.  For more information
 on the JDOM Project, please see <http://www.jdom.org/>.

 */

package org.jdom;

import java.util.*;

/**
 * An XML processing instruction. Methods allow the user to obtain the target of
 * the PI as well as its data. The data can always be accessed as a String or,
 * if the data appears akin to an attribute list, can be retrieved as name/value
 * pairs.
 *
 * @version $Revision: 1.47 $, $Date: 2007/11/10 05:28:59 $
 * @author  Brett McLaughlin
 * @author  Jason Hunter
 * @author  Steven Gould
 */
public class ProcessingInstruction extends Content {

    private static final String CVS_ID =
      "@(#) $RCSfile: ProcessingInstruction.java,v $ $Revision: 1.47 $ $Date: 2007/11/10 05:28:59 $ $Name: jdom_1_1 $";

    /** The target of the PI */
    protected String target;

    /** The data for the PI as a String */
    protected String rawData;

    /** The data for the PI in name/value pairs */
    protected Map mapData;

    /**
     * Default, no-args constructor for implementations
     * to use if needed.
     */
    protected ProcessingInstruction() { }

    /**
     * This will create a new <code>ProcessingInstruction</code>
     * with the specified target and data.
     *
     * @param target <code>String</code> target of PI.
     * @param data <code>Map</code> data for PI, in
     *             name/value pairs
     * @throws IllegalTargetException if the given target is illegal
     *         as a processing instruction name.
     */
    public ProcessingInstruction(String target, Map data) {
        setTarget(target);
        setData(data);
    }

    /**
     * This will create a new <code>ProcessingInstruction</code>
     * with the specified target and data.
     *
     * @param target <code>String</code> target of PI.
     * @param data <code>String</code> data for PI.
     * @throws IllegalTargetException if the given target is illegal
     *         as a processing instruction name.
     */
    public ProcessingInstruction(String target, String data) {
        setTarget(target);
        setData(data);
    }

    /**
     * This will set the target for the PI.
     *
     * @param newTarget <code>String</code> new target of PI.
     * @return <code>ProcessingInstruction</code> - this PI modified.
     */
    public ProcessingInstruction setTarget(String newTarget) {
        String reason;
        if ((reason = Verifier.checkProcessingInstructionTarget(newTarget))
                                    != null) {
            throw new IllegalTargetException(newTarget, reason);
        }

        target = newTarget;
        return this;
    }

    /**
     * Returns the XPath 1.0 string value of this element, which is the
     * data of this PI.
     *
     * @return the data of this PI
     */
    public String getValue() {
        return rawData;
    }

    /**
     * This will retrieve the target of the PI.
     *
     * @return <code>String</code> - target of PI.
     */
    public String getTarget() {
        return target;
    }

    /**
     * This will return the raw data from all instructions.
     *
     * @return <code>String</code> - data of PI.
     */
    public String getData() {
        return rawData;
    }

    /**
     * This will return a <code>List</code> containing the names of the
     * "attribute" style pieces of name/value pairs in this PI's data.
     *
     * @return <code>List</code> - the <code>List</code> containing the
     *         "attribute" names.
     */
    public List getPseudoAttributeNames() {
        // Return the map's keys directly. The previous implementation
        // reconstructed each name by parsing Map.Entry.toString() and
        // splitting at the first '=', which produced a truncated name
        // whenever a pseudo-attribute name set via setPseudoAttribute()
        // contained '=', and depended on the Entry.toString() format.
        return new ArrayList(mapData.keySet());
    }

    /**
     * This will set the raw data for the PI.
     *
     * @param data <code>String</code> data of PI.
     * @return <code>ProcessingInstruction</code> - this PI modified.
     * @throws IllegalDataException if the given data is illegal
     *         for a processing instruction.
     */
    public ProcessingInstruction setData(String data) {
        String reason = Verifier.checkProcessingInstructionData(data);
        if (reason != null) {
            throw new IllegalDataException(data, reason);
        }

        this.rawData = data;
        this.mapData = parseData(data);
        return this;
    }

    /**
     * This will set the name/value pairs within the passed
     * <code>Map</code> as the pairs for the data of
     * this PI.  The keys should be the pair name
     * and the values should be the pair values.
     *
     * @param data new map data to use
     * @return <code>ProcessingInstruction</code> - modified PI.
     * @throws IllegalDataException if the resulting data string is illegal
     *         for a processing instruction.
     */
    public ProcessingInstruction setData(Map data) {
        String temp = toString(data);

        String reason = Verifier.checkProcessingInstructionData(temp);
        if (reason != null) {
            throw new IllegalDataException(temp, reason);
        }

        this.rawData = temp;
        this.mapData = data;
        return this;
    }

    /**
     * This will return the value for a specific
     * name/value pair on the PI.  If no such pair is
     * found for this PI, null is returned.
     *
     * @param name <code>String</code> name of name/value pair
     *             to lookup value for.
     * @return <code>String</code> - value of name/value pair.
     */
    public String getPseudoAttributeValue(String name) {
        return (String)mapData.get(name);
    }

    /**
     * This will set a pseudo attribute with the given name and value.
     * If the PI data is not already in a pseudo-attribute format, this will
     * replace the existing data.
     *
     * @param name <code>String</code> name of pair.
     * @param value <code>String</code> value for pair.
     * @return <code>ProcessingInstruction</code> this PI modified.
     * @throws IllegalDataException if the given name or value is illegal
     *         for a processing instruction.
     */
    public ProcessingInstruction setPseudoAttribute(String name, String value) {
        String reason = Verifier.checkProcessingInstructionData(name);
        if (reason != null) {
            throw new IllegalDataException(name, reason);
        }

        reason = Verifier.checkProcessingInstructionData(value);
        if (reason != null) {
            throw new IllegalDataException(value, reason);
        }

        this.mapData.put(name, value);
        this.rawData = toString(mapData);
        return this;
    }

    /**
     * This will remove the pseudo attribute with the specified name.
     *
     * @param name name of pseudo attribute to remove
     * @return <code>boolean</code> - whether the requested
     *         instruction was removed.
     */
    public boolean removePseudoAttribute(String name) {
        if ((mapData.remove(name)) != null) {
            rawData = toString(mapData);
            return true;
        }

        return false;
    }

    /**
     * This will convert the Map to a string representation.
     *
     * @param mapData <code>Map</code> PI data to convert
     * @return a string representation of the Map as appropriate for a PI
     */
    private String toString(Map mapData) {
        StringBuffer rawData = new StringBuffer();

        Iterator i = mapData.keySet().iterator();
        while (i.hasNext()) {
            String name = (String)i.next();
            String value = (String)mapData.get(name);
            rawData.append(name)
                   .append("=\"")
                   .append(value)
                   .append("\" ");
        }
        // Remove last space, if we did any appending
        if (rawData.length() > 0) {
            rawData.setLength(rawData.length() - 1);
        }

        return rawData.toString();
    }

    /**
     * This will parse and load the instructions for the PI.
     * This is separated to allow it to occur once and then be reused.
     */
    private Map parseData(String rawData) {
        // The parsing here is done largely "by hand" which means the code
        // gets a little tricky/messy.  The following conditions should
        // now be handled correctly:
        //   <?pi href="http://hi/a=b"?>        Reads OK
        //   <?pi href = 'http://hi/a=b' ?>     Reads OK
        //   <?pi href\t = \t'http://hi/a=b'?>  Reads OK
        //   <?pi href  =  "http://hi/a=b"?>    Reads OK
        //   <?pi?>                             Empty Map
        //   <?pi id=22?>                       Empty Map
        //   <?pi id='22?>                      Empty Map

        Map data = new HashMap();

        // The inputData variable holds the part of rawData left to parse
        String inputData = rawData.trim();

        // Iterate through the remaining inputData string
        while (!inputData.trim().equals("")) {

            // Search for "name =", "name=" or "name1 name2..."
            String name = "";
            String value = "";
            int startName = 0;
            char previousChar = inputData.charAt(startName);
            int pos = 1;
            for (; pos<inputData.length(); pos++) {
                char currentChar = inputData.charAt(pos);
                if (currentChar == '=') {
                    name = inputData.substring(startName, pos).trim();
                    // Get the boundaries on the quoted string
                    // We use boundaries so we know where to start next loop
                    int[] bounds = extractQuotedString(
                                       inputData.substring(pos+1));
                    // A null value means a parse error and we return empty!
                    if (bounds == null) {
                        return new HashMap();
                    }
                    value = inputData.substring(bounds[0]+pos+1,
                                                bounds[1]+pos+1);
                    pos += bounds[1] + 1;  // skip past value
                    break;
                }
                else if (Character.isWhitespace(previousChar)
                             && !Character.isWhitespace(currentChar)) {
                    startName = pos;
                }
                previousChar = currentChar;
            }

            // Remove the first pos characters; they have been processed
            inputData = inputData.substring(pos);

            // If both a name and a value have been found, then add
            // them to the data Map
            if (name.length() > 0 && value != null) {
                data.put(name, value);
            }
        }

        return data;
    }

    /**
     * This is a helper routine, only used by parseData, to extract a
     * quoted String from the input parameter, rawData. A quoted string
     * can use either single or double quotes, but they must match up.
     * A singly quoted string can contain an unbalanced amount of double
     * quotes, or vice versa. For example, the String "JDOM's the best"
     * is legal as is 'JDOM"s the best'.
     *
     * @param rawData the input string from which a quoted string is to
     *                be extracted.
     * @return the start/end offsets of the first quoted string encountered
     *         in the input data, or <code>null</code> if no complete quoted
     *         string is found.
     * @see #parseData
     */
    private static int[] extractQuotedString(String rawData) {
        // Remembers whether we're actually in a quoted string yet
        boolean inQuotes = false;

        // Remembers which type of quoted string we're in
        char quoteChar = '"';

        // Stores the position of the first character inside
        //  the quoted string (i.e. the start of the return string)
        int start = 0;

        // Iterate through the input string looking for the start
        // and end of the quoted string
        for (int pos=0; pos < rawData.length(); pos++) {
            char currentChar = rawData.charAt(pos);
            if (currentChar=='"' || currentChar=='\'') {
                if (!inQuotes) {
                    // We're entering a quoted string
                    quoteChar = currentChar;
                    inQuotes = true;
                    start = pos+1;
                }
                else if (quoteChar == currentChar) {
                    // We're leaving a quoted string
                    inQuotes = false;
                    return new int[] { start, pos };
                }
                // Otherwise we've encountered a quote
                // inside a quote, so just continue
            }
        }

        return null;
    }

    /**
     * This returns a <code>String</code> representation of the
     * <code>ProcessingInstruction</code>, suitable for debugging. If the XML
     * representation of the <code>ProcessingInstruction</code> is desired,
     * {@link org.jdom.output.XMLOutputter#outputString(ProcessingInstruction)}
     * should be used.
     *
     * @return <code>String</code> - information about the
     *         <code>ProcessingInstruction</code>
     */
    public String toString() {
        return new StringBuffer()
            .append("[ProcessingInstruction: ")
            .append(new org.jdom.output.XMLOutputter().outputString(this))
            .append("]")
            .toString();
    }

    /**
     * This will return a clone of this <code>ProcessingInstruction</code>.
     *
     * @return <code>Object</code> - clone of this
     *         <code>ProcessingInstruction</code>.
     */
    public Object clone() {
        ProcessingInstruction pi = (ProcessingInstruction) super.clone();

        // target and rawdata are immutable and references copied by
        // Object.clone()

        // Create a new Map object for the clone (since Map isn't Cloneable)
        if (mapData != null) {
            pi.mapData = parseData(rawData);
        }
        return pi;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.jdbc2;

import java.sql.BatchUpdateException;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.List;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteJdbcDriver;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.cache.query.SqlFieldsQuery;
import org.apache.ignite.internal.IgniteKernal;
import org.apache.ignite.internal.processors.cache.QueryCursorImpl;
import org.apache.ignite.internal.processors.cache.query.IgniteQueryErrorCode;
import org.apache.ignite.internal.processors.cache.query.SqlFieldsQueryEx;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.lang.IgniteCallable;
import org.apache.ignite.resources.IgniteInstanceResource;

import static java.sql.Statement.SUCCESS_NO_INFO;
import static org.apache.ignite.internal.processors.cache.query.IgniteQueryErrorCode.createJdbcSqlException;

/**
 * Task for SQL batched update statements execution through {@link IgniteJdbcDriver}.
 * <p>
 * Operates in exactly one of two modes: argument batching (one SQL string, many
 * argument lists) or statement batching (many SQL strings, no arguments); the
 * constructor assertion enforces this invariant.
 */
class JdbcBatchUpdateTask implements IgniteCallable<int[]> {
    /** Serial version uid. */
    private static final long serialVersionUID = 0L;

    /** Ignite. */
    @IgniteInstanceResource
    private Ignite ignite;

    /** Cache name. */
    private final String cacheName;

    /** Schema name. */
    private final String schemaName;

    /** SQL command for argument batching. */
    private final String sql;

    /** Batch of statements. */
    private final List<String> sqlBatch;

    /** Batch of arguments. */
    private final List<List<Object>> batchArgs;

    /** Fetch size. */
    private final int fetchSize;

    /** Local execution flag. */
    private final boolean loc;

    /** Local query flag. */
    private final boolean locQry;

    /** Collocated query flag. */
    private final boolean collocatedQry;

    /** Distributed joins flag. */
    private final boolean distributedJoins;

    /**
     * @param ignite Ignite.
     * @param cacheName Cache name.
     * @param schemaName Schema name.
     * @param sql SQL query. {@code null} in case of statement batching.
     * @param sqlBatch Batch of SQL statements. {@code null} in case of parameter batching.
     * @param batchArgs Batch of SQL parameters. {@code null} in case of statement batching.
     * @param loc Local execution flag.
     * @param fetchSize Fetch size.
     * @param locQry Local query flag.
     * @param collocatedQry Collocated query flag.
     * @param distributedJoins Distributed joins flag.
     */
    public JdbcBatchUpdateTask(Ignite ignite, String cacheName, String schemaName, String sql,
        List<String> sqlBatch, List<List<Object>> batchArgs, boolean loc, int fetchSize,
        boolean locQry, boolean collocatedQry, boolean distributedJoins) {
        this.ignite = ignite;
        this.cacheName = cacheName;
        this.schemaName = schemaName;
        this.sql = sql;
        this.sqlBatch = sqlBatch;
        this.batchArgs = batchArgs;
        this.fetchSize = fetchSize;
        this.loc = loc;
        this.locQry = locQry;
        this.collocatedQry = collocatedQry;
        this.distributedJoins = distributedJoins;

        // Exactly one batching mode must be active.
        assert (!F.isEmpty(sql) && !F.isEmpty(batchArgs)) ^ !F.isEmpty(sqlBatch);
    }

    /** {@inheritDoc} */
    @Override public int[] call() throws Exception {
        IgniteCache<?, ?> cache = ignite.cache(cacheName);

        // Don't create caches on server nodes in order to avoid of data rebalancing.
        boolean start = ignite.configuration().isClientMode();

        if (cache == null && cacheName == null)
            cache = ((IgniteKernal)ignite).context().cache().getOrStartPublicCache(start, !loc && locQry);

        if (cache == null) {
            if (cacheName == null) {
                throw createJdbcSqlException("Failed to execute query. No suitable caches found.",
                    IgniteQueryErrorCode.CACHE_NOT_FOUND);
            }
            else {
                throw createJdbcSqlException("Cache not found [cacheName=" + cacheName + ']',
                    IgniteQueryErrorCode.CACHE_NOT_FOUND);
            }
        }

        // Single shared SQL string means argument batching; otherwise statement batching.
        boolean stmtBatching = F.isEmpty(sql);

        int batchSize = stmtBatching ? sqlBatch.size() : batchArgs.size();

        int[] counters = new int[batchSize];

        int done = 0;

        try {
            while (done < batchSize) {
                counters[done] = stmtBatching
                    ? doSingleUpdate(cache, sqlBatch.get(done), null)
                    : doSingleUpdate(cache, sql, batchArgs.get(done));

                done++;
            }
        }
        catch (Exception ex) {
            // Report only the counters of the statements that completed before the failure.
            throw new BatchUpdateException(Arrays.copyOf(counters, done), ex);
        }

        return counters;
    }

    /**
     * Performs update.
     *
     * @param cache Cache.
     * @param sqlText SQL text.
     * @param args Parameters.
     * @return Update counter.
     * @throws SQLException If failed.
     */
    private Integer doSingleUpdate(IgniteCache<?, ?> cache, String sqlText, List<Object> args) throws SQLException {
        SqlFieldsQuery qry = new SqlFieldsQueryEx(sqlText, false);

        qry.setPageSize(fetchSize);
        qry.setLocal(locQry);
        qry.setCollocated(collocatedQry);
        qry.setDistributedJoins(distributedJoins);
        qry.setSchema(schemaName);
        qry.setArgs(args == null ? null : args.toArray());

        QueryCursorImpl<List<?>> cur = (QueryCursorImpl<List<?>>)cache.withKeepBinary().query(qry);

        // Batched execution only accepts DML; a SELECT here is a caller error.
        if (cur.isQuery())
            throw createJdbcSqlException(getError("Query produced result set", qry),
                IgniteQueryErrorCode.STMT_TYPE_MISMATCH);

        List<List<?>> resultRows = cur.getAll();

        if (F.isEmpty(resultRows))
            return SUCCESS_NO_INFO;

        if (resultRows.size() != 1)
            throw new SQLException(getError("Expected single row for update operation result", qry));

        List<?> firstRow = resultRows.get(0);

        if (F.isEmpty(firstRow) || firstRow.size() != 1)
            throw new SQLException(getError("Expected row size of 1 for update operation", qry));

        Object res = firstRow.get(0);

        if (!(res instanceof Long))
            throw new SQLException(getError("Unexpected update result type", qry));

        long cnt = (Long)res;

        // JDBC counters are ints; clamp and warn rather than overflow.
        if (cnt > Integer.MAX_VALUE) {
            IgniteLogger log = ignite.log();

            if (log != null)
                log.warning(getError("Query updated row counter (" + cnt + ") exceeds integer range", qry));

            return Integer.MAX_VALUE;
        }

        return (int)cnt;
    }

    /**
     * Formats error message with query details.
     *
     * @param msg Error message.
     * @param qry Query.
     * @return Result.
     */
    private String getError(String msg, SqlFieldsQuery qry) {
        return msg + " [qry='" + qry.getSql() + "', params=" + Arrays.deepToString(qry.getArgs()) + ']';
    }
}
/*
 * ====================================================================
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 * ====================================================================
 *
 * This software consists of voluntary contributions made by many
 * individuals on behalf of the Apache Software Foundation.  For more
 * information on the Apache Software Foundation, please see
 * <http://www.apache.org/>.
 *
 */

package org.apache.hc.core5.http.impl.nio;

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.charset.StandardCharsets;

import org.apache.hc.core5.http.WritableByteChannelMock;
import org.apache.hc.core5.http.impl.BasicHttpTransportMetrics;
import org.apache.hc.core5.http.nio.SessionOutputBuffer;
import org.apache.hc.core5.util.CharArrayBuffer;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentMatchers;
import org.mockito.Mockito;

/**
 * Simple tests for {@link LengthDelimitedEncoder}.
 */
public class TestLengthDelimitedEncoder {

    /** Temporary file used by the file-transfer tests; deleted after each test. */
    private File tmpfile;

    protected File createTempFile() throws IOException {
        this.tmpfile = File.createTempFile("testFile", ".txt");
        return this.tmpfile;
    }

    @AfterEach
    public void deleteTempFile() {
        if (this.tmpfile != null && this.tmpfile.exists()) {
            this.tmpfile.delete();
        }
    }

    /**
     * Creates a fresh temporary file and writes the given chunks to it as US-ASCII.
     * Called with no arguments it simply creates an empty file.
     */
    private void writeToTempFile(final String... chunks) throws IOException {
        createTempFile();
        try (RandomAccessFile testfile = new RandomAccessFile(this.tmpfile, "rw")) {
            for (final String chunk : chunks) {
                testfile.write(chunk.getBytes(StandardCharsets.US_ASCII));
            }
        }
    }

    @Test
    public void testBasicCoding() throws Exception {
        final WritableByteChannelMock channel = new WritableByteChannelMock(64);
        final SessionOutputBuffer outbuf = new SessionOutputBufferImpl(1024, 128);
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        final LengthDelimitedEncoder encoder = new LengthDelimitedEncoder(
                channel, outbuf, metrics, 16);
        encoder.write(CodecTestUtils.wrap("stuff;"));
        encoder.write(CodecTestUtils.wrap("more stuff"));

        final String s = channel.dump(StandardCharsets.US_ASCII);

        Assertions.assertTrue(encoder.isCompleted());
        Assertions.assertEquals("stuff;more stuff", s);
        Assertions.assertEquals("[content length: 16; pos: 16; completed: true]", encoder.toString());
    }

    @Test
    public void testCodingBeyondContentLimit() throws Exception {
        final WritableByteChannelMock channel = new WritableByteChannelMock(64);
        final SessionOutputBuffer outbuf = new SessionOutputBufferImpl(1024, 128);
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        // Content length is 16; anything written beyond that must be silently ignored.
        final LengthDelimitedEncoder encoder = new LengthDelimitedEncoder(
                channel, outbuf, metrics, 16);
        encoder.write(CodecTestUtils.wrap("stuff;"));
        encoder.write(CodecTestUtils.wrap("more stuff; and a lot more stuff"));

        final String s = channel.dump(StandardCharsets.US_ASCII);

        Assertions.assertTrue(encoder.isCompleted());
        Assertions.assertEquals("stuff;more stuff", s);
    }

    @Test
    public void testCodingEmptyBuffer() throws Exception {
        final WritableByteChannelMock channel = new WritableByteChannelMock(64);
        final SessionOutputBuffer outbuf = new SessionOutputBufferImpl(1024, 128);
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        final LengthDelimitedEncoder encoder = new LengthDelimitedEncoder(
                channel, outbuf, metrics, 16);
        encoder.write(CodecTestUtils.wrap("stuff;"));

        // Empty and null buffers must be tolerated and contribute nothing.
        final ByteBuffer empty = ByteBuffer.allocate(100);
        empty.flip();
        encoder.write(empty);
        encoder.write(null);

        encoder.write(CodecTestUtils.wrap("more stuff"));

        final String s = channel.dump(StandardCharsets.US_ASCII);

        Assertions.assertTrue(encoder.isCompleted());
        Assertions.assertEquals("stuff;more stuff", s);
    }

    @Test
    public void testCodingCompleted() throws Exception {
        final WritableByteChannelMock channel = new WritableByteChannelMock(64);
        final SessionOutputBuffer outbuf = new SessionOutputBufferImpl(1024, 128);
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        final LengthDelimitedEncoder encoder = new LengthDelimitedEncoder(
                channel, outbuf, metrics, 5);
        encoder.write(CodecTestUtils.wrap("stuff"));

        // Writing after the declared content length has been reached is an error.
        Assertions.assertThrows(IllegalStateException.class, () ->
                encoder.write(CodecTestUtils.wrap("more stuff")));
    }

    @Test
    public void testInvalidConstructor() {
        final WritableByteChannelMock channel = new WritableByteChannelMock(64);
        final SessionOutputBuffer outbuf = new SessionOutputBufferImpl(1024, 128);
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        Assertions.assertThrows(NullPointerException.class, () ->
                new LengthDelimitedEncoder(null, null, null, 10));
        Assertions.assertThrows(NullPointerException.class, () ->
                new LengthDelimitedEncoder(channel, null, null, 10));
        Assertions.assertThrows(NullPointerException.class, () ->
                new LengthDelimitedEncoder(channel, outbuf, null, 10));
        Assertions.assertThrows(IllegalArgumentException.class, () ->
                new LengthDelimitedEncoder(channel, outbuf, metrics, -10));
    }

    @Test
    public void testCodingBeyondContentLimitFromFile() throws Exception {
        final WritableByteChannelMock channel = new WritableByteChannelMock(64);
        final SessionOutputBuffer outbuf = new SessionOutputBufferImpl(1024, 128);
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        final LengthDelimitedEncoder encoder = new LengthDelimitedEncoder(
                channel, outbuf, metrics, 16);

        writeToTempFile("stuff;", "more stuff; and a lot more stuff");

        try (RandomAccessFile testfile = new RandomAccessFile(this.tmpfile, "rw")) {
            final FileChannel fchannel = testfile.getChannel();
            encoder.transfer(fchannel, 0, 20);
        }

        final String s = channel.dump(StandardCharsets.US_ASCII);

        Assertions.assertTrue(encoder.isCompleted());
        Assertions.assertEquals("stuff;more stuff", s);
    }

    @Test
    public void testCodingEmptyFile() throws Exception {
        final WritableByteChannelMock channel = new WritableByteChannelMock(64);
        final SessionOutputBuffer outbuf = new SessionOutputBufferImpl(1024, 128);
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        final LengthDelimitedEncoder encoder = new LengthDelimitedEncoder(
                channel, outbuf, metrics, 16);
        encoder.write(CodecTestUtils.wrap("stuff;"));

        // Create an empty file; transferring from it must contribute no bytes.
        writeToTempFile();

        try (RandomAccessFile testfile = new RandomAccessFile(this.tmpfile, "rw")) {
            final FileChannel fchannel = testfile.getChannel();
            encoder.transfer(fchannel, 0, 20);
            encoder.write(CodecTestUtils.wrap("more stuff"));
        }

        final String s = channel.dump(StandardCharsets.US_ASCII);

        Assertions.assertTrue(encoder.isCompleted());
        Assertions.assertEquals("stuff;more stuff", s);
    }

    @Test
    public void testCodingCompletedFromFile() throws Exception {
        final WritableByteChannelMock channel = new WritableByteChannelMock(64);
        final SessionOutputBuffer outbuf = new SessionOutputBufferImpl(1024, 128);
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        final LengthDelimitedEncoder encoder = new LengthDelimitedEncoder(
                channel, outbuf, metrics, 5);
        encoder.write(CodecTestUtils.wrap("stuff"));

        writeToTempFile("more stuff");

        // Transfer after completion must fail just like write after completion.
        try (FileChannel fchannel = new RandomAccessFile(this.tmpfile, "rw").getChannel()) {
            Assertions.assertThrows(IllegalStateException.class, () ->
                    encoder.transfer(fchannel, 0, 10));
        }
    }

    @Test
    public void testCodingFromFileSmaller() throws Exception {
        final WritableByteChannelMock channel = new WritableByteChannelMock(64);
        final SessionOutputBuffer outbuf = new SessionOutputBufferImpl(1024, 128);
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        final LengthDelimitedEncoder encoder = new LengthDelimitedEncoder(
                channel, outbuf, metrics, 16);

        writeToTempFile("stuff;", "more stuff");

        try (RandomAccessFile testfile = new RandomAccessFile(this.tmpfile, "rw")) {
            final FileChannel fchannel = testfile.getChannel();
            encoder.transfer(fchannel, 0, 20);
        }

        final String s = channel.dump(StandardCharsets.US_ASCII);

        Assertions.assertTrue(encoder.isCompleted());
        Assertions.assertEquals("stuff;more stuff", s);
    }

    @Test
    public void testCodingFromFileFlushBuffer() throws Exception {
        final WritableByteChannelMock channel = new WritableByteChannelMock(64);
        final SessionOutputBuffer outbuf = new SessionOutputBufferImpl(1024, 128);
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        final LengthDelimitedEncoder encoder = new LengthDelimitedEncoder(
                channel, outbuf, metrics, 16);

        // Pending session-buffer content must be flushed ahead of the file transfer.
        final CharArrayBuffer chbuffer = new CharArrayBuffer(16);
        chbuffer.append("header");
        outbuf.writeLine(chbuffer);

        writeToTempFile("stuff;", "more stuff");

        try (RandomAccessFile testfile = new RandomAccessFile(this.tmpfile, "rw")) {
            final FileChannel fchannel = testfile.getChannel();
            encoder.transfer(fchannel, 0, 20);
        }

        final String s = channel.dump(StandardCharsets.US_ASCII);

        Assertions.assertTrue(encoder.isCompleted());
        Assertions.assertEquals("header\r\nstuff;more stuff", s);
    }

    @Test
    public void testCodingFromFileChannelSaturated() throws Exception {
        // Channel accepts at most 4 bytes per write: only part of the header fits.
        final WritableByteChannelMock channel = new WritableByteChannelMock(64, 4);
        final SessionOutputBuffer outbuf = new SessionOutputBufferImpl(1024, 128);
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        final LengthDelimitedEncoder encoder = new LengthDelimitedEncoder(
                channel, outbuf, metrics, 16);

        final CharArrayBuffer chbuffer = new CharArrayBuffer(16);
        chbuffer.append("header");
        outbuf.writeLine(chbuffer);

        writeToTempFile("stuff");

        try (RandomAccessFile testfile = new RandomAccessFile(this.tmpfile, "rw")) {
            final FileChannel fchannel = testfile.getChannel();
            encoder.transfer(fchannel, 0, 20);
            encoder.transfer(fchannel, 0, 20);
        }

        final String s = channel.dump(StandardCharsets.US_ASCII);

        Assertions.assertFalse(encoder.isCompleted());
        Assertions.assertEquals("head", s);
    }

    @Test
    public void testCodingNoFragmentBuffering() throws Exception {
        final WritableByteChannelMock channel = Mockito.spy(new WritableByteChannelMock(64));
        final SessionOutputBuffer outbuf = Mockito.spy(new SessionOutputBufferImpl(1024, 128));
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        final CharArrayBuffer chbuffer = new CharArrayBuffer(16);
        chbuffer.append("header");
        outbuf.writeLine(chbuffer);

        // Fragment size hint of 0: every fragment goes straight to the channel.
        final LengthDelimitedEncoder encoder = new LengthDelimitedEncoder(channel, outbuf, metrics, 100, 0);
        Assertions.assertEquals(5, encoder.write(CodecTestUtils.wrap("stuff")));

        Mockito.verify(channel, Mockito.times(2)).write(ArgumentMatchers.any());
        Mockito.verify(outbuf, Mockito.never()).write(ArgumentMatchers.<ByteBuffer>any());
        Mockito.verify(outbuf, Mockito.times(1)).flush(channel);

        Assertions.assertEquals(13, metrics.getBytesTransferred());

        outbuf.flush(channel);
        final String s = channel.dump(StandardCharsets.US_ASCII);
        Assertions.assertEquals("header\r\nstuff", s);
    }

    @Test
    public void testCodingFragmentBuffering() throws Exception {
        final WritableByteChannelMock channel = Mockito.spy(new WritableByteChannelMock(64));
        final SessionOutputBuffer outbuf = Mockito.spy(new SessionOutputBufferImpl(1024, 128));
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        final CharArrayBuffer chbuffer = new CharArrayBuffer(16);
        chbuffer.append("header");
        outbuf.writeLine(chbuffer);

        // Fragment smaller than the 32-byte hint: it is buffered, not written out.
        final LengthDelimitedEncoder encoder = new LengthDelimitedEncoder(channel, outbuf, metrics, 100, 32);
        Assertions.assertEquals(5, encoder.write(CodecTestUtils.wrap("stuff")));

        Mockito.verify(channel, Mockito.never()).write(ArgumentMatchers.any());
        Mockito.verify(outbuf, Mockito.times(1)).write(ArgumentMatchers.<ByteBuffer>any());
        Mockito.verify(outbuf, Mockito.never()).flush(channel);

        Assertions.assertEquals(0, metrics.getBytesTransferred());

        outbuf.flush(channel);
        final String s = channel.dump(StandardCharsets.US_ASCII);
        Assertions.assertEquals("header\r\nstuff", s);
    }

    @Test
    public void testCodingFragmentBufferingMultipleFragments() throws Exception {
        final WritableByteChannelMock channel = Mockito.spy(new WritableByteChannelMock(64));
        final SessionOutputBuffer outbuf = Mockito.spy(new SessionOutputBufferImpl(1024, 128));
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        final LengthDelimitedEncoder encoder = new LengthDelimitedEncoder(channel, outbuf, metrics, 100, 32);
        Assertions.assertEquals(5, encoder.write(CodecTestUtils.wrap("stuff")));
        Assertions.assertEquals(1, encoder.write(CodecTestUtils.wrap("-")));
        Assertions.assertEquals(10, encoder.write(CodecTestUtils.wrap("more stuff")));

        Mockito.verify(channel, Mockito.never()).write(ArgumentMatchers.any());
        Mockito.verify(outbuf, Mockito.times(3)).write(ArgumentMatchers.<ByteBuffer>any());
        Mockito.verify(outbuf, Mockito.never()).flush(channel);

        Assertions.assertEquals(0, metrics.getBytesTransferred());

        outbuf.flush(channel);
        final String s = channel.dump(StandardCharsets.US_ASCII);
        Assertions.assertEquals("stuff-more stuff", s);
    }

    @Test
    public void testCodingFragmentBufferingMultipleFragmentsBeyondContentLimit() throws Exception {
        final WritableByteChannelMock channel = Mockito.spy(new WritableByteChannelMock(64));
        final SessionOutputBuffer outbuf = Mockito.spy(new SessionOutputBufferImpl(1024, 128));
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        // Content limit of 16 truncates the final oversized fragment to 10 bytes.
        final LengthDelimitedEncoder encoder = new LengthDelimitedEncoder(channel, outbuf, metrics, 16, 32);
        Assertions.assertEquals(5, encoder.write(CodecTestUtils.wrap("stuff")));
        Assertions.assertEquals(1, encoder.write(CodecTestUtils.wrap("-")));
        Assertions.assertEquals(10, encoder.write(CodecTestUtils.wrap("more stuff; and a lot more stuff")));

        Mockito.verify(channel, Mockito.never()).write(ArgumentMatchers.any());
        Mockito.verify(outbuf, Mockito.times(3)).write(ArgumentMatchers.<ByteBuffer>any());
        Mockito.verify(outbuf, Mockito.never()).flush(channel);

        Assertions.assertEquals(0, metrics.getBytesTransferred());

        outbuf.flush(channel);
        final String s = channel.dump(StandardCharsets.US_ASCII);
        Assertions.assertEquals("stuff-more stuff", s);
    }

    @Test
    public void testCodingFragmentBufferingLargeFragment() throws Exception {
        final WritableByteChannelMock channel = Mockito.spy(new WritableByteChannelMock(64));
        final SessionOutputBuffer outbuf = Mockito.spy(new SessionOutputBufferImpl(1024, 128));
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        final CharArrayBuffer chbuffer = new CharArrayBuffer(16);
        chbuffer.append("header");
        outbuf.writeLine(chbuffer);

        // Fragment larger than the 2-byte hint bypasses buffering entirely.
        final LengthDelimitedEncoder encoder = new LengthDelimitedEncoder(channel, outbuf, metrics, 100, 2);
        Assertions.assertEquals(5, encoder.write(CodecTestUtils.wrap("stuff")));

        Mockito.verify(channel, Mockito.times(2)).write(ArgumentMatchers.any());
        Mockito.verify(outbuf, Mockito.never()).write(ArgumentMatchers.<ByteBuffer>any());
        Mockito.verify(outbuf, Mockito.times(1)).flush(channel);

        Assertions.assertEquals(13, metrics.getBytesTransferred());

        outbuf.flush(channel);
        final String s = channel.dump(StandardCharsets.US_ASCII);
        Assertions.assertEquals("header\r\nstuff", s);
    }

    @Test
    public void testCodingFragmentBufferingTinyFragments() throws Exception {
        final WritableByteChannelMock channel = Mockito.spy(new WritableByteChannelMock(64));
        final SessionOutputBuffer outbuf = Mockito.spy(new SessionOutputBufferImpl(1024, 128));
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        final LengthDelimitedEncoder encoder = new LengthDelimitedEncoder(channel, outbuf, metrics, 100, 1);
        Assertions.assertEquals(5, encoder.write(CodecTestUtils.wrap("stuff")));
        Assertions.assertEquals(1, encoder.write(CodecTestUtils.wrap("-")));
        Assertions.assertEquals(1, encoder.write(CodecTestUtils.wrap("-")));
        Assertions.assertEquals(1, encoder.write(CodecTestUtils.wrap("-")));
        Assertions.assertEquals(10, encoder.write(CodecTestUtils.wrap("more stuff")));

        Mockito.verify(channel, Mockito.times(5)).write(ArgumentMatchers.any());
        Mockito.verify(outbuf, Mockito.times(3)).write(ArgumentMatchers.<ByteBuffer>any());
        Mockito.verify(outbuf, Mockito.times(3)).flush(channel);

        Assertions.assertEquals(18, metrics.getBytesTransferred());

        outbuf.flush(channel);
        final String s = channel.dump(StandardCharsets.US_ASCII);
        Assertions.assertEquals("stuff---more stuff", s);
    }

    @Test
    public void testCodingFragmentBufferingTinyFragments2() throws Exception {
        final WritableByteChannelMock channel = Mockito.spy(new WritableByteChannelMock(64));
        final SessionOutputBuffer outbuf = Mockito.spy(new SessionOutputBufferImpl(1024, 128));
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        final LengthDelimitedEncoder encoder = new LengthDelimitedEncoder(channel, outbuf, metrics, 100, 2);
        Assertions.assertEquals(5, encoder.write(CodecTestUtils.wrap("stuff")));
        Assertions.assertEquals(1, encoder.write(CodecTestUtils.wrap("-")));
        Assertions.assertEquals(1, encoder.write(CodecTestUtils.wrap("-")));
        Assertions.assertEquals(1, encoder.write(CodecTestUtils.wrap("-")));
        Assertions.assertEquals(10, encoder.write(CodecTestUtils.wrap("more stuff")));

        Mockito.verify(channel, Mockito.times(4)).write(ArgumentMatchers.any());
        Mockito.verify(outbuf, Mockito.times(3)).write(ArgumentMatchers.<ByteBuffer>any());
        Mockito.verify(outbuf, Mockito.times(2)).flush(channel);

        Assertions.assertEquals(18, metrics.getBytesTransferred());

        outbuf.flush(channel);
        final String s = channel.dump(StandardCharsets.US_ASCII);
        Assertions.assertEquals("stuff---more stuff", s);
    }

    @Test
    public void testCodingFragmentBufferingTinyFragments3() throws Exception {
        final WritableByteChannelMock channel = Mockito.spy(new WritableByteChannelMock(64));
        final SessionOutputBuffer outbuf = Mockito.spy(new SessionOutputBufferImpl(1024, 128));
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        final LengthDelimitedEncoder encoder = new LengthDelimitedEncoder(channel, outbuf, metrics, 100, 3);
        Assertions.assertEquals(5, encoder.write(CodecTestUtils.wrap("stuff")));
        Assertions.assertEquals(1, encoder.write(CodecTestUtils.wrap("-")));
        Assertions.assertEquals(1, encoder.write(CodecTestUtils.wrap("-")));
        Assertions.assertEquals(1, encoder.write(CodecTestUtils.wrap("-")));
        Assertions.assertEquals(1, encoder.write(CodecTestUtils.wrap("-")));
        Assertions.assertEquals(2, encoder.write(CodecTestUtils.wrap("--")));
        Assertions.assertEquals(10, encoder.write(CodecTestUtils.wrap("more stuff")));

        Mockito.verify(channel, Mockito.times(4)).write(ArgumentMatchers.any());
        Mockito.verify(outbuf, Mockito.times(5)).write(ArgumentMatchers.<ByteBuffer>any());
        Mockito.verify(outbuf, Mockito.times(2)).flush(channel);

        Assertions.assertEquals(21, metrics.getBytesTransferred());

        outbuf.flush(channel);
        final String s = channel.dump(StandardCharsets.US_ASCII);
        Assertions.assertEquals("stuff------more stuff", s);
    }

    @Test
    public void testCodingFragmentBufferingBufferFlush() throws Exception {
        final WritableByteChannelMock channel = Mockito.spy(new WritableByteChannelMock(64));
        final SessionOutputBuffer outbuf = Mockito.spy(new SessionOutputBufferImpl(1024, 128));
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        final LengthDelimitedEncoder encoder = new LengthDelimitedEncoder(channel, outbuf, metrics, 100, 8);
        Assertions.assertEquals(5, encoder.write(CodecTestUtils.wrap("stuff")));
        Assertions.assertEquals(6, encoder.write(CodecTestUtils.wrap("-stuff")));

        Mockito.verify(channel, Mockito.times(1)).write(ArgumentMatchers.any());
        Mockito.verify(outbuf, Mockito.times(3)).write(ArgumentMatchers.<ByteBuffer>any());
        Mockito.verify(outbuf, Mockito.times(1)).flush(channel);

        Assertions.assertEquals(8, metrics.getBytesTransferred());
        Assertions.assertEquals(3, outbuf.length());

        outbuf.flush(channel);
        final String s = channel.dump(StandardCharsets.US_ASCII);
        Assertions.assertEquals("stuff-stuff", s);
    }

    @Test
    public void testCodingFragmentBufferingBufferFlush2() throws Exception {
        final WritableByteChannelMock channel = Mockito.spy(new WritableByteChannelMock(64));
        final SessionOutputBuffer outbuf = Mockito.spy(new SessionOutputBufferImpl(1024, 128));
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        final LengthDelimitedEncoder encoder = new LengthDelimitedEncoder(channel, outbuf, metrics, 100, 8);
        Assertions.assertEquals(5, encoder.write(CodecTestUtils.wrap("stuff")));
        Assertions.assertEquals(16, encoder.write(CodecTestUtils.wrap("-much more stuff")));

        Mockito.verify(channel, Mockito.times(2)).write(ArgumentMatchers.any());
        Mockito.verify(outbuf, Mockito.times(1)).write(ArgumentMatchers.<ByteBuffer>any());
        Mockito.verify(outbuf, Mockito.times(1)).flush(channel);

        Assertions.assertEquals(21, metrics.getBytesTransferred());
        Assertions.assertEquals(0, outbuf.length());

        outbuf.flush(channel);
        final String s = channel.dump(StandardCharsets.US_ASCII);
        Assertions.assertEquals("stuff-much more stuff", s);
    }

    @Test
    public void testCodingFragmentBufferingChannelSaturated() throws Exception {
        // Channel accepts at most 8 bytes in total before refusing further writes.
        final WritableByteChannelMock channel = Mockito.spy(new WritableByteChannelMock(64, 8));
        final SessionOutputBuffer outbuf = Mockito.spy(new SessionOutputBufferImpl(1024, 128));
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        final LengthDelimitedEncoder encoder = new LengthDelimitedEncoder(channel, outbuf, metrics, 100, 3);
        Assertions.assertEquals(5, encoder.write(CodecTestUtils.wrap("stuff")));
        Assertions.assertEquals(1, encoder.write(CodecTestUtils.wrap("-")));
        Assertions.assertEquals(1, encoder.write(CodecTestUtils.wrap("-")));
        Assertions.assertEquals(1, encoder.write(CodecTestUtils.wrap("-")));
        Assertions.assertEquals(1, encoder.write(CodecTestUtils.wrap("-")));
        Assertions.assertEquals(1, encoder.write(CodecTestUtils.wrap("-")));
        Assertions.assertEquals(1, encoder.write(CodecTestUtils.wrap("-")));
        Assertions.assertEquals(0, encoder.write(CodecTestUtils.wrap("-")));
        Assertions.assertEquals(0, encoder.write(CodecTestUtils.wrap("more stuff")));

        Mockito.verify(channel, Mockito.times(5)).write(ArgumentMatchers.any());
        Mockito.verify(outbuf, Mockito.times(6)).write(ArgumentMatchers.<ByteBuffer>any());
        Mockito.verify(outbuf, Mockito.times(4)).flush(channel);

        Assertions.assertEquals(8, metrics.getBytesTransferred());

        outbuf.flush(channel);
        final String s = channel.dump(StandardCharsets.US_ASCII);
        Assertions.assertEquals("stuff---", s);
        Assertions.assertEquals(3, outbuf.length());
    }

    @Test
    public void testCodingFragmentBufferingChannelSaturated2() throws Exception {
        final WritableByteChannelMock channel = Mockito.spy(new WritableByteChannelMock(64, 8));
        final SessionOutputBuffer outbuf = Mockito.spy(new SessionOutputBufferImpl(1024, 128));
        final BasicHttpTransportMetrics metrics = new BasicHttpTransportMetrics();

        final LengthDelimitedEncoder encoder = new LengthDelimitedEncoder(channel, outbuf, metrics, 100, 8);
        Assertions.assertEquals(5, encoder.write(CodecTestUtils.wrap("stuff")));
        Assertions.assertEquals(1, encoder.write(CodecTestUtils.wrap("-")));
        Assertions.assertEquals(1, encoder.write(CodecTestUtils.wrap("-")));
        Assertions.assertEquals(1, encoder.write(CodecTestUtils.wrap("much more stuff")));

        Mockito.verify(channel, Mockito.times(3)).write(ArgumentMatchers.any());
        Mockito.verify(outbuf, Mockito.times(3)).write(ArgumentMatchers.<ByteBuffer>any());
        Mockito.verify(outbuf, Mockito.times(1)).flush(channel);

        Assertions.assertEquals(8, metrics.getBytesTransferred());

        outbuf.flush(channel);
        final String s = channel.dump(StandardCharsets.US_ASCII);
        Assertions.assertEquals("stuff--m", s);
        Assertions.assertEquals(0, outbuf.length());
    }

}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.prestosql;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableMap;
import io.prestosql.connector.CatalogName;
import io.prestosql.metadata.SessionPropertyManager;
import io.prestosql.spi.QueryId;
import io.prestosql.spi.security.BasicPrincipal;
import io.prestosql.spi.security.Identity;
import io.prestosql.spi.security.SelectedRole;
import io.prestosql.spi.session.ResourceEstimates;
import io.prestosql.spi.type.TimeZoneKey;
import io.prestosql.sql.SqlPath;
import io.prestosql.transaction.TransactionId;

import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;

import static java.util.Collections.emptyMap;
import static java.util.Objects.requireNonNull;

/**
 * Immutable, JSON-serializable snapshot of a {@link Session}.
 * Use {@link #toSession} to reconstitute a live {@code Session} from it.
 */
public final class SessionRepresentation
{
    private final String queryId;
    private final Optional<TransactionId> transactionId;
    private final boolean clientTransactionSupport;
    private final String user;
    private final Optional<String> principal;
    private final Optional<String> source;
    private final Optional<String> catalog;
    private final Optional<String> schema;
    private final SqlPath path;
    private final Optional<String> traceToken;
    private final TimeZoneKey timeZoneKey;
    private final Locale locale;
    private final Optional<String> remoteUserAddress;
    private final Optional<String> userAgent;
    private final Optional<String> clientInfo;
    private final Set<String> clientTags;
    private final Set<String> clientCapabilities;
    private final long startTime;
    private final ResourceEstimates resourceEstimates;
    private final Map<String, String> systemProperties;
    private final Map<CatalogName, Map<String, String>> catalogProperties;
    private final Map<String, Map<String, String>> unprocessedCatalogProperties;
    private final Map<String, SelectedRole> roles;
    private final Map<String, String> preparedStatements;

    @JsonCreator
    public SessionRepresentation(
            @JsonProperty("queryId") String queryId,
            @JsonProperty("transactionId") Optional<TransactionId> transactionId,
            @JsonProperty("clientTransactionSupport") boolean clientTransactionSupport,
            @JsonProperty("user") String user,
            @JsonProperty("principal") Optional<String> principal,
            @JsonProperty("source") Optional<String> source,
            @JsonProperty("catalog") Optional<String> catalog,
            @JsonProperty("schema") Optional<String> schema,
            @JsonProperty("path") SqlPath path,
            @JsonProperty("traceToken") Optional<String> traceToken,
            @JsonProperty("timeZoneKey") TimeZoneKey timeZoneKey,
            @JsonProperty("locale") Locale locale,
            @JsonProperty("remoteUserAddress") Optional<String> remoteUserAddress,
            @JsonProperty("userAgent") Optional<String> userAgent,
            @JsonProperty("clientInfo") Optional<String> clientInfo,
            @JsonProperty("clientTags") Set<String> clientTags,
            @JsonProperty("clientCapabilities") Set<String> clientCapabilities,
            @JsonProperty("resourceEstimates") ResourceEstimates resourceEstimates,
            @JsonProperty("startTime") long startTime,
            @JsonProperty("systemProperties") Map<String, String> systemProperties,
            @JsonProperty("catalogProperties") Map<CatalogName, Map<String, String>> catalogProperties,
            @JsonProperty("unprocessedCatalogProperties") Map<String, Map<String, String>> unprocessedCatalogProperties,
            @JsonProperty("roles") Map<String, SelectedRole> roles,
            @JsonProperty("preparedStatements") Map<String, String> preparedStatements)
    {
        this.queryId = requireNonNull(queryId, "queryId is null");
        this.transactionId = requireNonNull(transactionId, "transactionId is null");
        this.clientTransactionSupport = clientTransactionSupport;
        this.user = requireNonNull(user, "user is null");
        this.principal = requireNonNull(principal, "principal is null");
        this.source = requireNonNull(source, "source is null");
        this.catalog = requireNonNull(catalog, "catalog is null");
        this.schema = requireNonNull(schema, "schema is null");
        this.path = requireNonNull(path, "path is null");
        this.traceToken = requireNonNull(traceToken, "traceToken is null");
        this.timeZoneKey = requireNonNull(timeZoneKey, "timeZoneKey is null");
        this.locale = requireNonNull(locale, "locale is null");
        this.remoteUserAddress = requireNonNull(remoteUserAddress, "remoteUserAddress is null");
        this.userAgent = requireNonNull(userAgent, "userAgent is null");
        this.clientInfo = requireNonNull(clientInfo, "clientInfo is null");
        this.clientTags = requireNonNull(clientTags, "clientTags is null");
        this.clientCapabilities = requireNonNull(clientCapabilities, "clientCapabilities is null");
        this.resourceEstimates = requireNonNull(resourceEstimates, "resourceEstimates is null");
        this.startTime = startTime;
        // Null-check the map arguments explicitly so a missing value fails with a
        // descriptive message, consistent with the parameters above (copyOf would
        // otherwise throw a bare NullPointerException).
        this.systemProperties = ImmutableMap.copyOf(requireNonNull(systemProperties, "systemProperties is null"));
        this.roles = ImmutableMap.copyOf(requireNonNull(roles, "roles is null"));
        this.preparedStatements = ImmutableMap.copyOf(requireNonNull(preparedStatements, "preparedStatements is null"));

        // Deep-copy the nested property maps so the representation is fully immutable.
        requireNonNull(catalogProperties, "catalogProperties is null");
        ImmutableMap.Builder<CatalogName, Map<String, String>> catalogPropertiesBuilder = ImmutableMap.builder();
        for (Entry<CatalogName, Map<String, String>> entry : catalogProperties.entrySet()) {
            catalogPropertiesBuilder.put(entry.getKey(), ImmutableMap.copyOf(entry.getValue()));
        }
        this.catalogProperties = catalogPropertiesBuilder.build();

        requireNonNull(unprocessedCatalogProperties, "unprocessedCatalogProperties is null");
        ImmutableMap.Builder<String, Map<String, String>> unprocessedCatalogPropertiesBuilder = ImmutableMap.builder();
        for (Entry<String, Map<String, String>> entry : unprocessedCatalogProperties.entrySet()) {
            unprocessedCatalogPropertiesBuilder.put(entry.getKey(), ImmutableMap.copyOf(entry.getValue()));
        }
        this.unprocessedCatalogProperties = unprocessedCatalogPropertiesBuilder.build();
    }

    @JsonProperty
    public String getQueryId()
    {
        return queryId;
    }

    @JsonProperty
    public Optional<TransactionId> getTransactionId()
    {
        return transactionId;
    }

    @JsonProperty
    public boolean isClientTransactionSupport()
    {
        return clientTransactionSupport;
    }

    @JsonProperty
    public String getUser()
    {
        return user;
    }

    @JsonProperty
    public Optional<String> getPrincipal()
    {
        return principal;
    }

    @JsonProperty
    public Optional<String> getSource()
    {
        return source;
    }

    @JsonProperty
    public Optional<String> getTraceToken()
    {
        return traceToken;
    }

    @JsonProperty
    public Optional<String> getCatalog()
    {
        return catalog;
    }

    @JsonProperty
    public Optional<String> getSchema()
    {
        return schema;
    }

    @JsonProperty
    public SqlPath getPath()
    {
        return path;
    }

    @JsonProperty
    public TimeZoneKey getTimeZoneKey()
    {
        return timeZoneKey;
    }

    @JsonProperty
    public Locale getLocale()
    {
        return locale;
    }

    @JsonProperty
    public Optional<String> getRemoteUserAddress()
    {
        return remoteUserAddress;
    }

    @JsonProperty
    public Optional<String> getUserAgent()
    {
        return userAgent;
    }

    @JsonProperty
    public Optional<String> getClientInfo()
    {
        return clientInfo;
    }

    @JsonProperty
    public Set<String> getClientTags()
    {
        return clientTags;
    }

    @JsonProperty
    public Set<String> getClientCapabilities()
    {
        return clientCapabilities;
    }

    @JsonProperty
    public long getStartTime()
    {
        return startTime;
    }

    @JsonProperty
    public ResourceEstimates getResourceEstimates()
    {
        return resourceEstimates;
    }

    @JsonProperty
    public Map<String, String> getSystemProperties()
    {
        return systemProperties;
    }

    @JsonProperty
    public Map<CatalogName, Map<String, String>> getCatalogProperties()
    {
        return catalogProperties;
    }

    @JsonProperty
    public Map<String, Map<String, String>> getUnprocessedCatalogProperties()
    {
        return unprocessedCatalogProperties;
    }

    @JsonProperty
    public Map<String, SelectedRole> getRoles()
    {
        return roles;
    }

    @JsonProperty
    public Map<String, String> getPreparedStatements()
    {
        return preparedStatements;
    }

    /**
     * Reconstitutes a {@link Session} from this representation with no extra credentials.
     */
    public Session toSession(SessionPropertyManager sessionPropertyManager)
    {
        return toSession(sessionPropertyManager, emptyMap());
    }

    /**
     * Reconstitutes a {@link Session} from this representation, attaching the given
     * extra credentials to the rebuilt {@link Identity}.
     */
    public Session toSession(SessionPropertyManager sessionPropertyManager, Map<String, String> extraCredentials)
    {
        return new Session(
                new QueryId(queryId),
                transactionId,
                clientTransactionSupport,
                new Identity(user, principal.map(BasicPrincipal::new), roles, extraCredentials),
                source,
                catalog,
                schema,
                path,
                traceToken,
                timeZoneKey,
                locale,
                remoteUserAddress,
                userAgent,
                clientInfo,
                clientTags,
                clientCapabilities,
                resourceEstimates,
                startTime,
                systemProperties,
                catalogProperties,
                unprocessedCatalogProperties,
                sessionPropertyManager,
                preparedStatements);
    }
}
// Copyright (C) 2008 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.client.admin; import com.google.gerrit.client.Dispatcher; import com.google.gerrit.client.Gerrit; import com.google.gerrit.client.VoidResult; import com.google.gerrit.client.groups.GroupApi; import com.google.gerrit.client.info.AccountInfo; import com.google.gerrit.client.info.GroupInfo; import com.google.gerrit.client.rpc.GerritCallback; import com.google.gerrit.client.rpc.Natives; import com.google.gerrit.client.ui.AccountGroupSuggestOracle; import com.google.gerrit.client.ui.AccountLinkPanel; import com.google.gerrit.client.ui.AccountSuggestOracle; import com.google.gerrit.client.ui.AddMemberBox; import com.google.gerrit.client.ui.FancyFlexTable; import com.google.gerrit.client.ui.Hyperlink; import com.google.gerrit.client.ui.SmallHeading; import com.google.gerrit.common.Nullable; import com.google.gerrit.reviewdb.client.AccountGroup; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.event.dom.client.ClickHandler; import com.google.gwt.user.client.ui.Anchor; import com.google.gwt.user.client.ui.Button; import com.google.gwt.user.client.ui.CheckBox; import com.google.gwt.user.client.ui.FlexTable.FlexCellFormatter; import com.google.gwt.user.client.ui.FlowPanel; import com.google.gwt.user.client.ui.Panel; import java.util.Comparator; import java.util.HashSet; import java.util.List; public class AccountGroupMembersScreen 
extends AccountGroupScreen { private MemberTable members; private IncludeTable includes; private Panel memberPanel; private AddMemberBox addMemberBox; private Button delMember; private Panel includePanel; private AddMemberBox addIncludeBox; private Button delInclude; private FlowPanel noMembersInfo; private AccountGroupSuggestOracle accountGroupSuggestOracle; public AccountGroupMembersScreen(GroupInfo toShow, String token) { super(toShow, token); } @Override protected void onInitUI() { super.onInitUI(); initMemberList(); initIncludeList(); initNoMembersInfo(); } private void enableForm(boolean canModify) { addMemberBox.setEnabled(canModify); members.setEnabled(canModify); addIncludeBox.setEnabled(canModify); includes.setEnabled(canModify); } private void initMemberList() { addMemberBox = new AddMemberBox( AdminConstants.I.buttonAddGroupMember(), AdminConstants.I.defaultAccountName(), new AccountSuggestOracle()); addMemberBox.addClickHandler( new ClickHandler() { @Override public void onClick(ClickEvent event) { doAddNewMember(); } }); members = new MemberTable(); members.addStyleName(Gerrit.RESOURCES.css().groupMembersTable()); delMember = new Button(AdminConstants.I.buttonDeleteGroupMembers()); delMember.addClickHandler( new ClickHandler() { @Override public void onClick(ClickEvent event) { members.deleteChecked(); } }); memberPanel = new FlowPanel(); memberPanel.add(new SmallHeading(AdminConstants.I.headingMembers())); memberPanel.add(addMemberBox); memberPanel.add(members); memberPanel.add(delMember); add(memberPanel); } private void initIncludeList() { accountGroupSuggestOracle = new AccountGroupSuggestOracle(); addIncludeBox = new AddMemberBox( AdminConstants.I.buttonAddIncludedGroup(), AdminConstants.I.defaultAccountGroupName(), accountGroupSuggestOracle); addIncludeBox.addClickHandler( new ClickHandler() { @Override public void onClick(ClickEvent event) { doAddNewInclude(); } }); includes = new IncludeTable(); 
includes.addStyleName(Gerrit.RESOURCES.css().groupIncludesTable()); delInclude = new Button(AdminConstants.I.buttonDeleteIncludedGroup()); delInclude.addClickHandler( new ClickHandler() { @Override public void onClick(ClickEvent event) { includes.deleteChecked(); } }); includePanel = new FlowPanel(); includePanel.add(new SmallHeading(AdminConstants.I.headingIncludedGroups())); includePanel.add(addIncludeBox); includePanel.add(includes); includePanel.add(delInclude); add(includePanel); } private void initNoMembersInfo() { noMembersInfo = new FlowPanel(); noMembersInfo.setVisible(false); noMembersInfo.add(new SmallHeading(AdminConstants.I.noMembersInfo())); add(noMembersInfo); } @Override protected void display(GroupInfo group, boolean canModify) { if (AccountGroup.isInternalGroup(group.getGroupUUID())) { members.display(Natives.asList(group.members())); includes.display(Natives.asList(group.includes())); } else { memberPanel.setVisible(false); includePanel.setVisible(false); noMembersInfo.setVisible(true); } enableForm(canModify); delMember.setVisible(canModify); delInclude.setVisible(canModify); } void doAddNewMember() { final String nameEmail = addMemberBox.getText(); if (nameEmail.length() == 0) { return; } addMemberBox.setEnabled(false); GroupApi.addMember( getGroupUUID(), nameEmail, new GerritCallback<AccountInfo>() { @Override public void onSuccess(AccountInfo memberInfo) { addMemberBox.setEnabled(true); addMemberBox.setText(""); members.insert(memberInfo); } @Override public void onFailure(Throwable caught) { addMemberBox.setEnabled(true); super.onFailure(caught); } }); } void doAddNewInclude() { String groupName = addIncludeBox.getText(); if (groupName.length() == 0) { return; } AccountGroup.UUID uuid = accountGroupSuggestOracle.getUUID(groupName); if (uuid == null) { return; } addIncludeBox.setEnabled(false); GroupApi.addIncludedGroup( getGroupUUID(), uuid.get(), new GerritCallback<GroupInfo>() { @Override public void onSuccess(GroupInfo result) { 
addIncludeBox.setEnabled(true); addIncludeBox.setText(""); includes.insert(result); } @Override public void onFailure(Throwable caught) { addIncludeBox.setEnabled(true); super.onFailure(caught); } }); } private class MemberTable extends FancyFlexTable<AccountInfo> { private boolean enabled = true; MemberTable() { table.setText(0, 2, AdminConstants.I.columnMember()); table.setText(0, 3, AdminConstants.I.columnEmailAddress()); final FlexCellFormatter fmt = table.getFlexCellFormatter(); fmt.addStyleName(0, 1, Gerrit.RESOURCES.css().iconHeader()); fmt.addStyleName(0, 2, Gerrit.RESOURCES.css().dataHeader()); fmt.addStyleName(0, 3, Gerrit.RESOURCES.css().dataHeader()); } void setEnabled(boolean enabled) { this.enabled = enabled; for (int row = 1; row < table.getRowCount(); row++) { final AccountInfo i = getRowItem(row); if (i != null) { ((CheckBox) table.getWidget(row, 1)).setEnabled(enabled); } } } void deleteChecked() { final HashSet<Integer> ids = new HashSet<>(); for (int row = 1; row < table.getRowCount(); row++) { final AccountInfo i = getRowItem(row); if (i != null && ((CheckBox) table.getWidget(row, 1)).getValue()) { ids.add(i._accountId()); } } if (!ids.isEmpty()) { GroupApi.removeMembers( getGroupUUID(), ids, new GerritCallback<VoidResult>() { @Override public void onSuccess(VoidResult result) { for (int row = 1; row < table.getRowCount(); ) { final AccountInfo i = getRowItem(row); if (i != null && ids.contains(i._accountId())) { table.removeRow(row); } else { row++; } } } }); } } void display(List<AccountInfo> result) { while (1 < table.getRowCount()) { table.removeRow(table.getRowCount() - 1); } for (AccountInfo i : result) { final int row = table.getRowCount(); table.insertRow(row); applyDataRowStyle(row); populate(row, i); } } void insert(AccountInfo info) { Comparator<AccountInfo> c = new Comparator<AccountInfo>() { @Override public int compare(AccountInfo a, AccountInfo b) { int cmp = nullToEmpty(a.name()).compareTo(nullToEmpty(b.name())); if (cmp != 0) { 
return cmp; } cmp = nullToEmpty(a.email()).compareTo(nullToEmpty(b.email())); if (cmp != 0) { return cmp; } return a._accountId() - b._accountId(); } public String nullToEmpty(String str) { return str == null ? "" : str; } }; int insertPos = getInsertRow(c, info); if (insertPos >= 0) { table.insertRow(insertPos); applyDataRowStyle(insertPos); populate(insertPos, info); } } void populate(int row, AccountInfo i) { CheckBox checkBox = new CheckBox(); table.setWidget(row, 1, checkBox); checkBox.setEnabled(enabled); table.setWidget(row, 2, AccountLinkPanel.create(i)); table.setText(row, 3, i.email()); final FlexCellFormatter fmt = table.getFlexCellFormatter(); fmt.addStyleName(row, 1, Gerrit.RESOURCES.css().iconCell()); fmt.addStyleName(row, 2, Gerrit.RESOURCES.css().dataCell()); fmt.addStyleName(row, 3, Gerrit.RESOURCES.css().dataCell()); setRowItem(row, i); } } private class IncludeTable extends FancyFlexTable<GroupInfo> { private boolean enabled = true; IncludeTable() { table.setText(0, 2, AdminConstants.I.columnGroupName()); table.setText(0, 3, AdminConstants.I.columnGroupDescription()); final FlexCellFormatter fmt = table.getFlexCellFormatter(); fmt.addStyleName(0, 1, Gerrit.RESOURCES.css().iconHeader()); fmt.addStyleName(0, 2, Gerrit.RESOURCES.css().dataHeader()); fmt.addStyleName(0, 3, Gerrit.RESOURCES.css().dataHeader()); } void setEnabled(boolean enabled) { this.enabled = enabled; for (int row = 1; row < table.getRowCount(); row++) { final GroupInfo i = getRowItem(row); if (i != null) { ((CheckBox) table.getWidget(row, 1)).setEnabled(enabled); } } } void deleteChecked() { final HashSet<AccountGroup.UUID> ids = new HashSet<>(); for (int row = 1; row < table.getRowCount(); row++) { final GroupInfo i = getRowItem(row); if (i != null && ((CheckBox) table.getWidget(row, 1)).getValue()) { ids.add(i.getGroupUUID()); } } if (!ids.isEmpty()) { GroupApi.removeIncludedGroups( getGroupUUID(), ids, new GerritCallback<VoidResult>() { @Override public void 
onSuccess(VoidResult result) { for (int row = 1; row < table.getRowCount(); ) { final GroupInfo i = getRowItem(row); if (i != null && ids.contains(i.getGroupUUID())) { table.removeRow(row); } else { row++; } } } }); } } void display(List<GroupInfo> list) { while (1 < table.getRowCount()) { table.removeRow(table.getRowCount() - 1); } for (GroupInfo i : list) { final int row = table.getRowCount(); table.insertRow(row); applyDataRowStyle(row); populate(row, i); } } void insert(GroupInfo info) { Comparator<GroupInfo> c = new Comparator<GroupInfo>() { @Override public int compare(GroupInfo a, GroupInfo b) { int cmp = nullToEmpty(a.name()).compareTo(nullToEmpty(b.name())); if (cmp != 0) { return cmp; } return a.getGroupUUID().compareTo(b.getGroupUUID()); } private String nullToEmpty(@Nullable String str) { return (str == null) ? "" : str; } }; int insertPos = getInsertRow(c, info); if (insertPos >= 0) { table.insertRow(insertPos); applyDataRowStyle(insertPos); populate(insertPos, info); } } void populate(int row, GroupInfo i) { final FlexCellFormatter fmt = table.getFlexCellFormatter(); AccountGroup.UUID uuid = i.getGroupUUID(); CheckBox checkBox = new CheckBox(); table.setWidget(row, 1, checkBox); checkBox.setEnabled(enabled); if (AccountGroup.isInternalGroup(uuid)) { table.setWidget(row, 2, new Hyperlink(i.name(), Dispatcher.toGroup(uuid))); fmt.getElement(row, 2).setTitle(null); table.setText(row, 3, i.description()); } else if (i.url() != null) { Anchor a = new Anchor(); a.setText(i.name()); a.setHref(i.url()); a.setTitle("UUID " + uuid.get()); table.setWidget(row, 2, a); fmt.getElement(row, 2).setTitle(null); } else { table.setText(row, 2, i.name()); fmt.getElement(row, 2).setTitle("UUID " + uuid.get()); } fmt.addStyleName(row, 1, Gerrit.RESOURCES.css().iconCell()); fmt.addStyleName(row, 2, Gerrit.RESOURCES.css().dataCell()); fmt.addStyleName(row, 3, Gerrit.RESOURCES.css().dataCell()); setRowItem(row, i); } } }
/**
 * Copyright (c) 2007-2014 Kaazing Corporation. All rights reserved.
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.kaazing.gateway.transport.http.bridge.filter;

import static java.lang.Long.parseLong;
import static java.lang.String.format;
import static java.lang.String.valueOf;
import static java.util.Arrays.asList;
import static org.kaazing.gateway.transport.http.HttpAcceptFilter.CONTENT_LENGTH_ADJUSTMENT;
import static org.kaazing.gateway.transport.http.HttpHeaders.HEADER_CACHE_CONTROL;
import static org.kaazing.gateway.transport.http.HttpHeaders.HEADER_CONTENT_LENGTH;
import static org.kaazing.gateway.transport.http.HttpMethod.POST;
import static org.kaazing.gateway.transport.http.HttpStatus.SUCCESS_OK;

import java.net.URI;
import java.net.URLDecoder;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;

import org.apache.mina.core.filterchain.IoFilterChain;
import org.apache.mina.core.session.IoSession;
import org.apache.mina.core.write.WriteRequest;
import org.apache.mina.filter.codec.ProtocolDecoderException;
import org.apache.mina.filter.codec.ProtocolEncoderException;
import org.kaazing.gateway.transport.http.HttpAcceptSession;
import org.kaazing.gateway.transport.http.HttpConnectSession;
import org.kaazing.gateway.transport.http.HttpCookie;
import org.kaazing.gateway.transport.http.HttpMethod;
import org.kaazing.gateway.transport.http.HttpSession;
import org.kaazing.gateway.transport.http.HttpStatus;
import org.kaazing.gateway.transport.http.HttpVersion;
import org.kaazing.gateway.transport.http.bridge.HttpHeaderNameComparator;
import org.kaazing.gateway.transport.http.bridge.HttpRequestMessage;
import org.kaazing.gateway.transport.http.bridge.HttpResponseMessage;

/**
 * Filter translating between enveloped ("httpxe") HTTP messages and the outer
 * HTTP session that carries them.
 *
 * <p>In CLIENT mode, outgoing requests are wrapped: the envelope is sent as a
 * POST whose selected headers/cookies are lifted onto the outer session, and
 * incoming responses are unwrapped back onto the enveloped message. In SERVER
 * mode the roles are reversed: incoming POST envelopes are validated and
 * unwrapped, and outgoing responses are wrapped.
 */
public class HttpxeProtocolFilter extends HttpFilterAdapter<IoSession> {

    private static final String CONTENT_TYPE_TEXT_PLAIN_CHARSET_UTF_8 = "text/plain;charset=UTF-8";
    private static final String CONTENT_TYPE_APPLICATION_OCTET_STREAM = "application/octet-stream";
    private static final String CONTENT_TYPE_APPLICATION_X_MESSAGE_HTTP = "application/x-message-http";
    private static final String CONTENT_TYPE_TEXT_PLAIN = "text/plain";
    private static final String CONTENT_TYPE_PREFIX_TEXT = "text/";

    // Header names a browser's XMLHttpRequest may not set; an envelope carrying
    // any of these is rejected in receiveAndExtractHttpRequest.
    private static final SortedSet<String> RESTRICTED_ENVELOPE_HEADERS;

    static {
        // note: restricted headers are case-insensitive (!)
        // see: http://www.w3.org/TR/XMLHttpRequest/#dom-xmlhttprequest-setrequestheader
        SortedSet<String> restrictedEnvelopeHeaders = new TreeSet<>(HttpHeaderNameComparator.INSTANCE);
        restrictedEnvelopeHeaders.addAll(asList("Accept-Charset", "Accept-Encoding", "Access-Control-Request-Headers",
                "Access-Control-Request-Method", "Connection", "Cookie", "Cookie2", "Date", "DNT", "Expect", "Host",
                "Keep-Alive", "Origin", "Referer", "TE", "Trailer", "Transfer-Encoding", "Upgrade", "User-Agent", "Via"));
        RESTRICTED_ENVELOPE_HEADERS = restrictedEnvelopeHeaders;
    }

    // Charsets whose bytes are compatible with enveloping as text/plain;
    // compared lowercase against the "charset=..." parameter.
    private static final Collection<String> ASCII_COMPATIBLE =
            Arrays.asList("charset=ascii", "charset=utf-8", "charset=windows-1252");

    // Which side of the envelope this filter instance sits on.
    private enum Mode {
        CLIENT, SERVER
    }

    private final Mode mode;

    public HttpxeProtocolFilter(boolean client) {
        this.mode = client ? Mode.CLIENT : Mode.SERVER;
    }

    /** CLIENT mode: wrap outgoing requests; otherwise pass through. */
    @Override
    protected void filterWriteHttpRequest(NextFilter nextFilter, IoSession session, WriteRequest writeRequest,
            HttpRequestMessage httpRequest) throws Exception {
        switch (mode) {
        case CLIENT:
            HttpConnectSession httpSession = (HttpConnectSession) session;
            filterWriteAndInjectHttpRequest(nextFilter, httpSession, writeRequest, httpRequest);
            break;
        default:
            super.filterWriteHttpRequest(nextFilter, session, writeRequest, httpRequest);
            break;
        }
    }

    /** SERVER mode: validate and unwrap incoming envelopes; otherwise pass through. */
    @Override
    protected void httpRequestReceived(NextFilter nextFilter, IoSession session, HttpRequestMessage httpRequest)
            throws Exception {
        // GL.debug("http", getClass().getSimpleName() + " request received.");
        switch (mode) {
        case SERVER:
            HttpAcceptSession httpSession = (HttpAcceptSession)session;
            receiveAndExtractHttpRequest(nextFilter, httpSession, httpRequest);
            break;
        default:
            super.httpRequestReceived(nextFilter, session, httpRequest);
            break;
        }
    }

    /** SERVER mode: wrap outgoing responses; otherwise pass through. */
    @Override
    protected void filterWriteHttpResponse(NextFilter nextFilter, IoSession session, WriteRequest writeRequest,
            HttpResponseMessage httpResponse) throws Exception {
        switch (mode) {
        case SERVER:
            HttpAcceptSession httpSession = (HttpAcceptSession)session;
            filterWriteAndInjectHttpResponse(nextFilter, httpSession, writeRequest, httpResponse);
            break;
        default:
            super.filterWriteHttpResponse(nextFilter, session, writeRequest, httpResponse);
            break;
        }
    }

    /** CLIENT mode: unwrap incoming responses; otherwise pass through. */
    @Override
    protected void httpResponseReceived(NextFilter nextFilter, IoSession session, HttpResponseMessage httpResponse)
            throws Exception {
        switch (mode) {
        case CLIENT:
            HttpSession httpSession = (HttpSession) session;
            receiveAndExtractHttpResponse(nextFilter, httpSession, httpResponse);
            break;
        default:
            super.httpResponseReceived(nextFilter, session, httpResponse);
            break;
        }
    }

    /**
     * Lifts the enveloped request's version, URI, content-type, headers and
     * cookies onto the outer connect session, forcing the outer method to POST.
     * Headers moved onto the outer session are removed from the envelope.
     */
    private void filterWriteAndInjectHttpRequest(NextFilter nextFilter, HttpConnectSession session,
            WriteRequest writeRequest, HttpRequestMessage httpRequest) throws Exception {

        // inject version
        HttpVersion version = httpRequest.getVersion();
        session.setVersion(version);

        // inject method
        session.setMethod(POST);

        // inject requestURI
        URI requestURI = httpRequest.getRequestURI();
        session.setRequestURI(requestURI);

        // inject content-type
        String contentType = httpRequest.getHeader("Content-Type");
        String newContentType = calculateContentType(contentType);
        session.setWriteHeader("Content-Type", newContentType);

        // inject headers
        for (Iterator<String> iterator = httpRequest.iterateHeaderNames(); iterator.hasNext(); ) {
            String headerName = iterator.next();

            // skip headers that must not be inserted
            switch (headerName.charAt(0)) {
            case 'a':
            case 'A':
                if ("Authorization".equalsIgnoreCase(headerName)) {
                    continue;
                }
                break;
            case 'c':
            case 'C':
                if (HEADER_CONTENT_LENGTH.equalsIgnoreCase(headerName)) {
                    // envelope length differs from payload length; install the
                    // adjustment filter instead of copying the header verbatim
                    IoFilterChain filterChain = session.getFilterChain();
                    filterChain.addFirst(CONTENT_LENGTH_ADJUSTMENT.filterName(), CONTENT_LENGTH_ADJUSTMENT.filter());
                    break;
                }
                else if ("Content-Type".equalsIgnoreCase(headerName)) {
                    continue;
                }
                break;
            }

            // inject header
            String headerValue = httpRequest.getHeader(headerName);
            session.setWriteHeader(headerName, headerValue);
            iterator.remove();
        }

        // inject cookies
        if (httpRequest.hasCookies()) {
            Set<HttpCookie> writeCookies = session.getWriteCookies();
            for (Iterator<HttpCookie> iterator = httpRequest.iterateCookies(); iterator.hasNext(); ) {
                HttpCookie cookie = iterator.next();
                writeCookies.add(cookie);
                iterator.remove();
            }
        }

        nextFilter.filterWrite(session, writeRequest);
    }

    /**
     * Validates that the outer session carries a well-formed envelope (POST,
     * matching version and URI, content-type application/x-message-http, no
     * restricted headers), then copies the outer session's headers and cookies
     * onto the enveloped request before propagating it.
     */
    private void receiveAndExtractHttpRequest(NextFilter nextFilter, HttpAcceptSession session,
            HttpRequestMessage httpRequest) throws Exception {

        // set implicit content length
        httpRequest.setContentLengthImplicit(true);

        // validate version
        if (session.getVersion() != httpRequest.getVersion()) {
            throw new ProtocolDecoderException("HTTP version mismatch");
        }

        // validate method
        if (session.getMethod() != HttpMethod.POST) {
            throw new ProtocolDecoderException("Unexpected HTTP method");
        }

        // validate request URI (compared after percent-decoding both sides)
        if (!URLDecoder.decode(session.getRequestURI().toString(), "UTF-8").equals(URLDecoder.decode(httpRequest.getRequestURI().toString(), "UTF-8"))) {
            throw new ProtocolDecoderException("HTTP request URI mismatch");
        }

        // validate content type
        String contentType = session.getReadHeader("Content-Type");
        if (contentType == null) {
            throw new ProtocolDecoderException("Expected HTTP content-type");
        }
        else if (!CONTENT_TYPE_APPLICATION_X_MESSAGE_HTTP.equals(contentType)) {
            throw new ProtocolDecoderException("Unexpected HTTP content-type");
        }

        // validate enveloped header names
        for (String headerName : httpRequest.getHeaderNames()) {
            if (RESTRICTED_ENVELOPE_HEADERS.contains(headerName)) {
                throw new ProtocolDecoderException("Unsupported HTTP header(s)");
            }
        }

        // extract read headers
        for (String readHeaderName : session.getReadHeaderNames()) {

            // skip headers that must not be extracted
            switch (readHeaderName.charAt(0)) {
            case 'a':
            case 'A':
                if ("Authorization".equalsIgnoreCase(readHeaderName)) {
                    continue;
                }
                break;
            case 'c':
            case 'C':
                if (HEADER_CONTENT_LENGTH.equalsIgnoreCase(readHeaderName)) {
                    switch (httpRequest.getMethod()) {
                    case GET:
                    case HEAD:
                        break;
                    default:
                        if (!httpRequest.hasHeader(readHeaderName)) {
                            // inner length = outer length minus the envelope
                            // bytes already consumed from the session
                            long contentLength = parseLong(session.getReadHeader(readHeaderName));
                            long newContentLength = contentLength - session.getReadBytes();
                            httpRequest.setHeader(readHeaderName, valueOf(newContentLength));
                        }
                        break;
                    }
                    continue;
                }
                else if ("Content-Type".equalsIgnoreCase(readHeaderName)) {
                    continue;
                }
                break;
            case 'x':
            case 'X':
                if ("X-Next-Protocol".equalsIgnoreCase(readHeaderName)) {
                    // avoid propagating next protocol header (!)
                    continue;
                }
                break;
            }

            // extract header
            List<String> readHeaderValues = session.getReadHeaders(readHeaderName);
            if (readHeaderValues != null && !readHeaderValues.isEmpty()) {
                for(String headerValue : readHeaderValues) {
                    httpRequest.addHeader(readHeaderName, headerValue);
                }
            }
        }

        // extract cookies
        for (HttpCookie readCookie : session.getReadCookies()) {
            httpRequest.addCookie(readCookie);
        }

        // propagate message
        nextFilter.messageReceived(session, httpRequest);
    }

    /**
     * Lifts selected headers, cookies, status and content-type from the
     * enveloped response onto the outer accept session. Only an allow-listed
     * set of headers (CORS, caching, content metadata, Server, Set-Cookie,
     * Transfer-Encoding, X-Content-Type-Options) is moved; all others remain
     * inside the envelope. The labeled switch ladder below dispatches on
     * characters of the header name before the (more expensive) full
     * case-insensitive comparison: "break outer" means inject, "continue"
     * means keep inside the envelope.
     */
    private HttpResponseMessage filterWriteAndInjectHttpResponse(NextFilter nextFilter, HttpAcceptSession session,
            WriteRequest writeRequest, HttpResponseMessage httpResponse) throws ProtocolEncoderException {

        // set implicit content length
        httpResponse.setContentLengthImplicit(true);
        httpResponse.setBlockPadding(false);

        // inject version
        HttpVersion version = httpResponse.getVersion();
        session.setVersion(version);

        // inject status (only CLIENT_NOT_FOUND survives; everything else
        // is reported as 200 OK on the outer session)
        HttpStatus status = httpResponse.getStatus();
        switch (status) {
        case CLIENT_NOT_FOUND:
            session.setStatus(status);
            if (httpResponse.hasReason()) {
                session.setReason(httpResponse.getReason());
            }
            break;
        default:
            session.setStatus(SUCCESS_OK);
            break;
        }

        // note: this logic appears duplicated/derived from HttpMessageEncoder.encodeContentLength(...)
        boolean adjustContentLength = httpResponse.hasHeader(HEADER_CONTENT_LENGTH) ||
                (httpResponse.isComplete() &&
                 !"gzip".equals(httpResponse.getHeader("Content-Encoding")) &&
                 !"chunked".equals(httpResponse.getHeader("Transfer-Encoding")));

        // inject headers
        for (Iterator<String> iterator = httpResponse.iterateHeaderNames(); iterator.hasNext(); ) {
            String headerName = iterator.next();
            if (headerName.length() > 0) {
                outer:
                switch (headerName.charAt(0)) {
                case 'a':
                case 'A':
                    if (headerName.length() > 21) {
                        // char 21 distinguishes the Access-Control-Allow-* family
                        switch(headerName.charAt(21)) {
                        case 'c':
                        case 'C':
                            if ("Access-Control-Allow-Credentials".equalsIgnoreCase(headerName)) {
                                break outer;
                            }
                            break;
                        case 'h':
                        case 'H':
                            if ("Access-Control-Allow-Headers".equalsIgnoreCase(headerName)) {
                                break outer;
                            }
                            break;
                        case 'o':
                        case 'O':
                            if ("Access-Control-Allow-Origin".equalsIgnoreCase(headerName)) {
                                break outer;
                            }
                            break;
                        }
                    }
                    continue;
                case 'c':
                case 'C':
                    if (headerName.length() > 1) {
                        switch (headerName.charAt(1)) {
                        case 'a':
                        case 'A':
                            if ("Cache-Control".equalsIgnoreCase(headerName)) {
                                break outer;
                            }
                            break;
                        case 'o':
                        case 'O':
                            // character 2 is same for both
                            if (headerName.length() > 3) {
                                switch(headerName.charAt(3)) {
                                case 'n':
                                case 'N':
                                    if ("Connection".equalsIgnoreCase(headerName)) {
                                        break outer;
                                    }
                                    break;
                                case 't':
                                case 'T':
                                    if (HEADER_CONTENT_LENGTH.equalsIgnoreCase(headerName)) {
                                        break outer;
                                    }
                                    else if ("Content-Encoding".equalsIgnoreCase(headerName)) {
                                        break outer;
                                    }
                                    else if ("Content-Type".equalsIgnoreCase(headerName)) {
                                        // Content-Type stays inside; the outer
                                        // value is computed below
                                        break;
                                    }
                                    break;
                                }
                            }
                            break;
                        }
                    }
                    continue;
                case 'd':
                case 'D':
                    if ("Date".equalsIgnoreCase(headerName)) {
                        break outer;
                    }
                    continue;
                case 'e':
                case 'E':
                    if ("ETag".equalsIgnoreCase(headerName)) {
                        break outer;
                    }
                    continue;
                case 'l':
                case 'L':
                    if ("Last-Modified".equalsIgnoreCase(headerName)) {
                        break outer;
                    }
                    continue;
                case 'p':
                case 'P':
                    if ("Pragma".equalsIgnoreCase(headerName)) {
                        break outer;
                    }
                    continue;
                case 's':
                case 'S':
                    if (headerName.length() > 2) {
                        switch (headerName.charAt(2)) {
                        case 'r':
                        case 'R':
                            if ("Server".equalsIgnoreCase(headerName)) {
                                break outer;
                            }
                            break;
                        case 't':
                        case 'T':
                            if ("Set-Cookie".equalsIgnoreCase(headerName)) {
                                break outer;
                            }
                            break;
                        }
                    }
                    continue;
                case 't':
                case 'T':
                    if ("Transfer-Encoding".equalsIgnoreCase(headerName)) {
                        break outer;
                    }
                    continue;
                case 'x':
                case 'X':
                    if ("X-Content-Type-Options".equalsIgnoreCase(headerName)) {
                        break outer;
                    }
                    continue;
                default:
                    continue;
                }
            }

            // inject header
            List<String> headerValues = httpResponse.getHeaderValues(headerName);
            session.setWriteHeaders(headerName, headerValues);
            iterator.remove();
        }

        // a masked non-200 envelope must not be cached as if it were a real 200
        if (session.getStatus() == HttpStatus.SUCCESS_OK &&
                httpResponse.getStatus() != HttpStatus.SUCCESS_OK &&
                session.getWriteHeader(HEADER_CACHE_CONTROL) == null) {
            session.setWriteHeader(HEADER_CACHE_CONTROL, "no-cache");
        }

        // inject cookies
        if (httpResponse.hasCookies()) {
            Set<HttpCookie> writeCookies = session.getWriteCookies();
            for (Iterator<HttpCookie> iterator = httpResponse.iterateCookies(); iterator.hasNext(); ) {
                HttpCookie cookie = iterator.next();
                writeCookies.add(cookie);
                iterator.remove();
            }
        }

        // handle content-length
        if (adjustContentLength) {
            IoFilterChain filterChain = session.getFilterChain();
            filterChain.addFirst(CONTENT_LENGTH_ADJUSTMENT.filterName(), CONTENT_LENGTH_ADJUSTMENT.filter());
        }

        // handle content-type
        String contentType = httpResponse.getHeader("Content-Type");
        if ( HttpContentMessageInjectionFilter.contentAutomaticallyInjectable(httpResponse.getStatus())) {
            contentType = null;
        }
        String newContentType = calculateContentType(contentType);
        session.setWriteHeader("Content-Type", newContentType);

        nextFilter.filterWrite(session, writeRequest);

        return httpResponse;
    }

    /**
     * Validates the outer response (matching version, outer status 200 OK) and
     * copies the outer session's headers and cookies onto the enveloped
     * response, cross-checking the inner/outer content-type for consistency.
     */
    private void receiveAndExtractHttpResponse(NextFilter nextFilter, HttpSession session,
            HttpResponseMessage httpResponse) throws Exception {

        // validate version
        if (session.getVersion() != httpResponse.getVersion()) {
            throw new ProtocolDecoderException("HTTP version mismatch");
        }

        // validate status
        switch (session.getStatus()) {
        case SUCCESS_OK:
            break;
        default:
            throw new ProtocolDecoderException("HTTP status mismatch");
        }

        // extract read headers
        for (String readHeaderName : session.getReadHeaderNames()) {

            // skip headers that must not be extracted
            switch (readHeaderName.charAt(0)) {
            case 'c':
            case 'C':
                if (HEADER_CONTENT_LENGTH.equalsIgnoreCase(readHeaderName)) {
                    if (!httpResponse.hasHeader(readHeaderName)) {
                        // inner length = outer length minus bytes already read
                        long contentLength = parseLong(session.getReadHeader(readHeaderName));
                        long newContentLength = contentLength - session.getReadBytes();
                        httpResponse.setHeader(readHeaderName, valueOf(newContentLength));
                    }
                    continue;
                }
                if ("Content-Type".equalsIgnoreCase(readHeaderName)) {
                    // validate content type: the outer value must agree with
                    // the enveloped one (text/* collapses to text/plain)
                    String contentType = session.getReadHeader("Content-Type");
                    String innerContentType = httpResponse.getHeader("Content-Type");
                    if (innerContentType != null) {
                        if (innerContentType.startsWith(CONTENT_TYPE_PREFIX_TEXT)) {
                            int charsetAt = innerContentType.indexOf(';');
                            if (charsetAt == -1) {
                                if (CONTENT_TYPE_TEXT_PLAIN.equals(contentType) == false) {
                                    throw new ProtocolDecoderException("Inconsistent HTTP content-type");
                                }
                            }
                            else {
                                String charset = innerContentType.substring(charsetAt + 1);
                                if (!ASCII_COMPATIBLE.contains(charset.toLowerCase())) {
                                    throw new ProtocolEncoderException("HTTP enveloping not compatible with charset: " + charset);
                                }
                                if (format("%s;%s", CONTENT_TYPE_TEXT_PLAIN, charset).equals(contentType) == false) {
                                    throw new ProtocolDecoderException("Inconsistent HTTP content-type");
                                }
                            }
                        }
                        else if (innerContentType.equals(contentType) == false) {
                            throw new ProtocolDecoderException("Inconsistent HTTP content-type");
                        }
                    }
                    continue;
                }
                break;
            case 'w':
            case 'W':
                if ("WWW-Authenticate".equalsIgnoreCase(readHeaderName)) {
                    // note: do not extract this challenge to avoid collision between http / httpxe 401s
                    continue;
                }
                break;
            }

            // extract header
            String readHeaderValue = session.getReadHeader(readHeaderName);
            httpResponse.setHeader(readHeaderName, readHeaderValue);
        }

        // extract cookies
        for (HttpCookie readCookie : session.getReadCookies()) {
            httpResponse.addCookie(readCookie);
        }

        // propagate message
        nextFilter.messageReceived(session, httpResponse);
    }

    /**
     * Maps an enveloped content-type to the outer content-type: null becomes
     * text/plain;charset=UTF-8, text/* collapses to text/plain (preserving an
     * ASCII-compatible charset parameter, rejecting others), and anything else
     * becomes application/octet-stream.
     */
    private String calculateContentType(String contentType) throws ProtocolEncoderException {

        if (contentType == null) {
            return CONTENT_TYPE_TEXT_PLAIN_CHARSET_UTF_8;
        }

        // text/???[;charset=???] => text/plain[;charset=???]
        if (contentType.startsWith(CONTENT_TYPE_PREFIX_TEXT)) {
            int charsetAt = contentType.indexOf(';');
            if (charsetAt != -1) {
                String charsetName = contentType.substring(charsetAt + 1).trim();
                if (!ASCII_COMPATIBLE.contains(charsetName.toLowerCase())) {
                    throw new ProtocolEncoderException("HTTP enveloping not compatible with charset: " + charsetName);
                }
                return format("%s;%s", CONTENT_TYPE_TEXT_PLAIN, charsetName);
            }
            else {
                return CONTENT_TYPE_TEXT_PLAIN;
            }
        }

        // non-text => application/octet-stream
        return CONTENT_TYPE_APPLICATION_OCTET_STREAM;
    }
}
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.elasticloadbalancing.model;

import java.io.Serializable;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Container for the parameters to the
 * {@link com.amazonaws.services.elasticloadbalancing.AmazonElasticLoadBalancing#enableAvailabilityZonesForLoadBalancer(EnableAvailabilityZonesForLoadBalancerRequest)
 * EnableAvailabilityZonesForLoadBalancer operation}.
 * <p>
 * Adds the specified Availability Zones to the set of Availability Zones
 * for the specified load balancer. The load balancer evenly distributes
 * requests across all its registered Availability Zones that contain
 * instances.
 * </p>
 * <p>
 * For more information, see
 * <a href="http://docs.aws.amazon.com/ElasticLoadBalancing/latest/DeveloperGuide/US_AddLBAvailabilityZone.html"> Add Availability Zone </a>
 * in the <i>Elastic Load Balancing Developer Guide</i> .
 * </p>
 *
 * @see com.amazonaws.services.elasticloadbalancing.AmazonElasticLoadBalancing#enableAvailabilityZonesForLoadBalancer(EnableAvailabilityZonesForLoadBalancerRequest)
 */
public class EnableAvailabilityZonesForLoadBalancerRequest extends AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The name of the load balancer. */
    private String loadBalancerName;

    /**
     * The Availability Zones. These must be in the same region as the load
     * balancer.
     */
    private com.amazonaws.internal.ListWithAutoConstructFlag<String> availabilityZones;

    /**
     * Default constructor. Callers should use the setter or fluent setter
     * (with...) methods to initialize this object after creating it.
     */
    public EnableAvailabilityZonesForLoadBalancerRequest() {}

    /**
     * Constructs a new request with the two required members; any further
     * members can be set with the setter or fluent setter (with...) methods.
     *
     * @param loadBalancerName The name of the load balancer.
     * @param availabilityZones The Availability Zones. These must be in the
     *            same region as the load balancer.
     */
    public EnableAvailabilityZonesForLoadBalancerRequest(String loadBalancerName, java.util.List<String> availabilityZones) {
        setLoadBalancerName(loadBalancerName);
        setAvailabilityZones(availabilityZones);
    }

    /**
     * Returns the name of the load balancer.
     *
     * @return The name of the load balancer.
     */
    public String getLoadBalancerName() {
        return loadBalancerName;
    }

    /**
     * Sets the name of the load balancer.
     *
     * @param loadBalancerName The name of the load balancer.
     */
    public void setLoadBalancerName(String loadBalancerName) {
        this.loadBalancerName = loadBalancerName;
    }

    /**
     * Sets the name of the load balancer and returns this request so that
     * method calls can be chained together.
     *
     * @param loadBalancerName The name of the load balancer.
     * @return This request, for chaining.
     */
    public EnableAvailabilityZonesForLoadBalancerRequest withLoadBalancerName(String loadBalancerName) {
        setLoadBalancerName(loadBalancerName);
        return this;
    }

    /**
     * Returns the Availability Zones. These must be in the same region as
     * the load balancer. An empty auto-construct list is created lazily on
     * first access, so this never returns {@code null} unless the field was
     * explicitly set to {@code null} and not read since.
     *
     * @return The Availability Zones.
     */
    public java.util.List<String> getAvailabilityZones() {
        if (availabilityZones == null) {
            com.amazonaws.internal.ListWithAutoConstructFlag<String> fresh =
                    new com.amazonaws.internal.ListWithAutoConstructFlag<String>();
            fresh.setAutoConstruct(true);
            availabilityZones = fresh;
        }
        return availabilityZones;
    }

    /**
     * Replaces the Availability Zones with a defensive copy of the given
     * collection. Passing {@code null} clears the field.
     *
     * @param availabilityZones The Availability Zones. These must be in the
     *            same region as the load balancer.
     */
    public void setAvailabilityZones(java.util.Collection<String> availabilityZones) {
        if (availabilityZones == null) {
            this.availabilityZones = null;
        } else {
            com.amazonaws.internal.ListWithAutoConstructFlag<String> copy =
                    new com.amazonaws.internal.ListWithAutoConstructFlag<String>(availabilityZones.size());
            copy.addAll(availabilityZones);
            this.availabilityZones = copy;
        }
    }

    /**
     * Appends the given Availability Zones to the existing list (if any).
     * Use {@link #setAvailabilityZones(java.util.Collection)} or
     * {@link #withAvailabilityZones(java.util.Collection)} to override the
     * existing values instead.
     *
     * @param availabilityZones The Availability Zones. These must be in the
     *            same region as the load balancer.
     * @return This request, for chaining.
     */
    public EnableAvailabilityZonesForLoadBalancerRequest withAvailabilityZones(String... availabilityZones) {
        if (getAvailabilityZones() == null) {
            setAvailabilityZones(new java.util.ArrayList<String>(availabilityZones.length));
        }
        java.util.List<String> target = getAvailabilityZones();
        for (String zone : availabilityZones) {
            target.add(zone);
        }
        return this;
    }

    /**
     * Replaces the Availability Zones with a defensive copy of the given
     * collection and returns this request so that method calls can be
     * chained together. Passing {@code null} clears the field.
     *
     * @param availabilityZones The Availability Zones. These must be in the
     *            same region as the load balancer.
     * @return This request, for chaining.
     */
    public EnableAvailabilityZonesForLoadBalancerRequest withAvailabilityZones(java.util.Collection<String> availabilityZones) {
        // Identical copy-or-clear semantics as the plain setter.
        setAvailabilityZones(availabilityZones);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing
     * and debugging.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getLoadBalancerName() != null) {
            sb.append("LoadBalancerName: " + getLoadBalancerName() + ",");
        }
        if (getAvailabilityZones() != null) {
            sb.append("AvailabilityZones: " + getAvailabilityZones() );
        }
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((getLoadBalancerName() == null) ? 0 : getLoadBalancerName().hashCode());
        result = prime * result + ((getAvailabilityZones() == null) ? 0 : getAvailabilityZones().hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof EnableAvailabilityZonesForLoadBalancerRequest)) {
            return false;
        }
        EnableAvailabilityZonesForLoadBalancerRequest other = (EnableAvailabilityZonesForLoadBalancerRequest) obj;

        if (other.getLoadBalancerName() == null ^ this.getLoadBalancerName() == null) {
            return false;
        }
        if (other.getLoadBalancerName() != null && !other.getLoadBalancerName().equals(this.getLoadBalancerName())) {
            return false;
        }
        if (other.getAvailabilityZones() == null ^ this.getAvailabilityZones() == null) {
            return false;
        }
        if (other.getAvailabilityZones() != null && !other.getAvailabilityZones().equals(this.getAvailabilityZones())) {
            return false;
        }
        return true;
    }

    @Override
    public EnableAvailabilityZonesForLoadBalancerRequest clone() {
        return (EnableAvailabilityZonesForLoadBalancerRequest) super.clone();
    }

}
/*
 * Copyright 2009 Thomas Bocek
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package net.tomp2p.connection;

import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.util.concurrent.DefaultThreadFactory;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.GenericFutureListener;

import java.io.IOException;
import java.net.InetAddress;
import java.security.KeyPair;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import net.tomp2p.futures.BaseFutureAdapter;
import net.tomp2p.futures.FutureDone;
import net.tomp2p.peers.Number160;
import net.tomp2p.peers.PeerAddress;
import net.tomp2p.peers.PeerSocketAddress;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Creates a peer and listens to incoming connections. The result of creating
 * this class is the connection bean and the peer bean. While the connection
 * bean holds information that can be shared, the peer bean holds information
 * that is unique for each peer.
 *
 * A "master" peer owns the netty event loops and the channel server; "slave"
 * peers (created with the second constructor) reuse the master's resources and
 * only get their own peer bean.
 *
 * @author Thomas Bocek
 *
 */
public class PeerCreator {
    private static final Logger LOG = LoggerFactory.getLogger(PeerCreator.class);

    // Shared among master and slaves (slaves copy the master's reference).
    private final ConnectionBean connectionBean;

    // Unique per peer.
    private final PeerBean peerBean;

    // Netty event loops; created by the master, reused by slaves.
    private final EventLoopGroup workerGroup;
    private final EventLoopGroup bossGroup;

    // true if this instance created (and therefore must shut down) the
    // shared resources; false for slaves attached to a master.
    private final boolean master;

    // Completed once shutdown (including netty teardown for a master) is done.
    private final FutureDone<Void> futureServerDone = new FutureDone<Void>();

    /**
     * Creates a master peer and starts UDP and TCP channels.
     *
     * @param p2pId
     *            The id of the network
     * @param peerId
     *            The id of this peer
     * @param keyPair
     *            The key pair or null
     * @param channelServerConfiguration
     *            The server configuration to create the channel server that is
     *            used for listening for incoming connections
     * @param channelClientConfiguration
     *            The client side configuration
     * @param timer
     *            The executor service
     * @param sendBehavior
     *            The sending behavior for direct messages
     * @throws IOException
     *             If the startup of listening to connections failed
     */
    public PeerCreator(final int p2pId, final Number160 peerId, final KeyPair keyPair,
            final ChannelServerConfiguration channelServerConfiguration,
            final ChannelClientConfiguration channelClientConfiguration,
            final ScheduledExecutorService timer,
            SendBehavior sendBehavior) throws IOException {
        //peer bean
        peerBean = new PeerBean(keyPair);
        // Discovers the network interfaces and fails fast (IOException) if no
        // bindable address is found.
        PeerAddress self = findPeerAddress(peerId, channelClientConfiguration, channelServerConfiguration);
        peerBean.serverPeerAddress(self);
        LOG.info("Visible address to other peers: {}", self);

        //start server
        // 0 = netty's default worker thread count; 2 boss threads for accepting.
        workerGroup = new NioEventLoopGroup(0,
                new DefaultThreadFactory(ConnectionBean.THREAD_NAME + "worker-client/server - "));
        bossGroup = new NioEventLoopGroup(2,
                new DefaultThreadFactory(ConnectionBean.THREAD_NAME + "boss - "));

        Dispatcher dispatcher = new Dispatcher(p2pId, peerBean, channelServerConfiguration.heartBeatMillis());
        final ChannelServer channelServer = new ChannelServer(bossGroup, workerGroup,
                channelServerConfiguration, dispatcher, peerBean.peerStatusListeners(), timer);

        //connection bean
        Sender sender = new Sender(peerId, peerBean.peerStatusListeners(), channelClientConfiguration,
                dispatcher, sendBehavior, peerBean);
        Reservation reservation = new Reservation(workerGroup, channelClientConfiguration, peerBean);
        connectionBean = new ConnectionBean(p2pId, dispatcher, sender, channelServer, reservation,
                channelClientConfiguration, timer);
        this.master = true;
    }

    /**
     * Creates a slave peer that will attach itself to a master peer. The
     * slave shares the master's event loops and connection bean; only the
     * peer bean (and thus the peer id) is its own.
     *
     * @param parent
     *            The parent peer
     * @param peerId
     *            The id of this peer
     * @param keyPair
     *            The key pair or null
     */
    public PeerCreator(final PeerCreator parent, final Number160 peerId, final KeyPair keyPair) {
        this.workerGroup = parent.workerGroup;
        this.bossGroup = parent.bossGroup;
        this.connectionBean = parent.connectionBean;
        this.peerBean = new PeerBean(keyPair);
        // Same socket address as the parent, but with this peer's id.
        PeerAddress self = parent.peerBean().serverPeerAddress().changePeerId(peerId);
        this.peerBean.serverPeerAddress(self);
        this.master = false;
    }

    /**
     * Shutdown the peer. If the peer is a master, then also the connections and
     * the server will be closed, otherwise its just de-registering.
     *
     * The master teardown is chained asynchronously: reservation -> channel
     * server -> netty groups; the returned future completes only after the
     * last step.
     *
     * @return The future when the shutdown is complete
     */
    public FutureDone<Void> shutdown() {
        if (master) {
            LOG.debug("Shutting down...");
        }
        // de-register in dispatcher
        connectionBean.dispatcher().removeIoHandler(peerBean().serverPeerAddress().peerId());
        // shutdown running tasks for this peer
        if (peerBean.maintenanceTask() != null) {
            peerBean.maintenanceTask().shutdown();
        }
        // shutdown all children
        // NOTE(review): a slave only de-registers; the shared resources stay
        // alive for the master to tear down.
        if (!master) {
            return futureServerDone.done();
        }
        // shutdown the timer
        connectionBean.timer().shutdown();
        LOG.debug("Shutting down client...");
        connectionBean.reservation().shutdown().addListener(new BaseFutureAdapter<FutureDone<Void>>() {
            @Override
            public void operationComplete(final FutureDone<Void> future) throws Exception {
                connectionBean.channelServer().shutdown().addListener(new BaseFutureAdapter<FutureDone<Void>>() {
                    @Override
                    public void operationComplete(final FutureDone<Void> future) throws Exception {
                        shutdownNetty();
                    }
                });
            }
        });
        // this is blocking
        return futureServerDone;
    }

    /**
     * Shuts down the netty event loops: worker group first, then (from its
     * completion listener) the boss group; only then is futureServerDone
     * marked done.
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    private void shutdownNetty() {
        workerGroup.shutdownGracefully(0, 0, TimeUnit.SECONDS).addListener(new GenericFutureListener() {
            @Override
            public void operationComplete(final Future future) throws Exception {
                LOG.debug("Client / WorkerGroup shut down.");
                bossGroup.shutdownGracefully(0, 0, TimeUnit.SECONDS).addListener(
                        new GenericFutureListener() {
                            @Override
                            public void operationComplete(final Future future) throws Exception {
                                LOG.debug("Client / BossGroup shut down.");
                                futureServerDone.done();
                            }
                        });
            }
        });
    }

    /**
     * @return The bean that holds information that is unique for all peers
     */
    public PeerBean peerBean() {
        return peerBean;
    }

    /**
     * @return The bean that holds information that may be shared among peers
     */
    public ConnectionBean connectionBean() {
        return connectionBean;
    }

    /**
     * Creates the {@link PeerAddress} based on the network discovery.
     *
     * @param peerId
     *            The id of this peer
     * @return The peer address of this peer
     * @throws IOException
     *             If the address could not be determined
     */
    private static PeerAddress findPeerAddress(final Number160 peerId,
            final ChannelClientConfiguration channelClientConfiguration,
            final ChannelServerConfiguration channelServerConfiguration) throws IOException {
        final DiscoverResults discoverResults = DiscoverNetworks.discoverInterfaces(
                channelClientConfiguration.bindings());
        final String status = discoverResults.status();
        if (LOG.isInfoEnabled()) {
            LOG.info("Status of external address search: " + status);
        }

        //this is just a guess and will be changed once discovery is done
        InetAddress outsideAddress = discoverResults.foundAddress();
        if(outsideAddress == null) {
            throw new IOException("Not listening to anything. Maybe the binding information is wrong.");
        }

        final PeerSocketAddress peerSocketAddress = new PeerSocketAddress(outsideAddress,
                channelServerConfiguration.ports().tcpPort(),
                channelServerConfiguration.ports().udpPort());
        // Firewalled flags for both TCP and UDP come from the same
        // configuration setting.
        final PeerAddress self = new PeerAddress(peerId, peerSocketAddress,
                null,
                channelServerConfiguration.isBehindFirewall(),
                channelServerConfiguration.isBehindFirewall(), false, false, false,
                false, PeerAddress.EMPTY_PEER_SOCKET_ADDRESSES);

        return self;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.alibaba.weex.uitest.TC_Input;

import android.app.Activity;
import android.app.Application;
import android.app.Instrumentation;
import android.content.Intent;
import android.test.ActivityInstrumentationTestCase2;
import android.test.TouchUtils;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;

import com.alibaba.weex.R;
import com.alibaba.weex.util.ScreenShot;
import com.alibaba.weex.WXPageActivity;
import com.alibaba.weex.WeappJsBaseTestCase;
import com.alibaba.weex.constants.Constants;
import com.alibaba.weex.util.ViewUtil;
import com.taobao.weex.ui.view.WXTextView;

import java.io.IOException;
import java.util.ArrayList;

/**
 * UI test that opens the "TC_Input" case from the case list, drills into
 * "TC_Input_Type" and takes a screenshot of the result.
 *
 * Created by admin on 16/3/23.
 */
public class WeexUiTestCaseTcInputType extends ActivityInstrumentationTestCase2<WXPageActivity> {

    public final String TAG = "TestScript_Guide==";
    public WeappJsBaseTestCase weappApplication;
    public WXPageActivity waTestPageActivity;
    public WXPageActivity waTestPageActivity2;

    public ViewGroup mViewGroup;
    public Application mApplication;
    public Instrumentation mInstrumentation;

    // Accumulated views from the case list whose text matches the target
    // prefix (filled in setUp/addAllTargetView).
    public ArrayList<View> mCaseListIndexView = new ArrayList<View>();

    public WeexUiTestCaseTcInputType() {
        super(WXPageActivity.class);
    }

    /**
     * Launches WXPageActivity with the test bundle URL and collects all
     * "TC_"-prefixed case entries from the container view.
     */
    public void setUp() throws Exception{
        Log.e("TestScript_Guide", "setUp test!!");
        setActivityInitialTouchMode(false);
        weappApplication = new WeappJsBaseTestCase();
        mInstrumentation = getInstrumentation();

        Intent intent = new Intent();
        intent.putExtra("bundleUrl", Constants.BUNDLE_URL);
        launchActivityWithIntent("com.alibaba.weex", WXPageActivity.class, intent);
        waTestPageActivity = getActivity();
        // waTestPageActivity.getIntent().getData().toString();
        Log.e(TAG,"activity1=" + waTestPageActivity.toString() );
        Thread.sleep(3000);
        mViewGroup = (ViewGroup) waTestPageActivity.findViewById(R.id.container);
        setViewGroup(mViewGroup);
        mCaseListIndexView = ViewUtil.findViewWithText(mViewGroup, "TC_");
        addAllTargetView("TC_");
        Thread.sleep(3000);
    }

//    public void testPreConditions()
//    {
//        assertNotNull(waTestPageActivity);
//        assertNotNull(mViewGroup);
//        assertNotNull(mCaseListIndexView);
//
//    }

    /**
     * Clicks into TC_Input, then into TC_Input_Type, and captures a
     * screenshot of the resulting page.
     */
    public void testInputType(){
        for(final View caseView : mCaseListIndexView){
            if (((WXTextView)caseView).getText().toString().equals("TC_Input")){
                Log.e(TAG, "TC_Input find");
                final WXTextView inputView = (WXTextView)caseView;
                // Click on the UI thread; instrumentation must not touch views
                // directly.
                mInstrumentation.runOnMainSync(new Runnable() {
                    @Override
                    public void run() {
                        inputView.requestFocus();
                        inputView.performClick();
                    }
                });
                sleep(2000);
                setActivity(WXPageActivity.wxPageActivityInstance);
                Activity activity2 = getActivity();
                Log.e(TAG, "activity2 = " + activity2.toString());
                ViewGroup myGroup = (ViewGroup)(activity2.findViewById(R.id.container));
                Log.e(TAG, myGroup.toString());
                ArrayList<View> inputListView = new ArrayList<View>();
                inputListView = ViewUtil.findViewWithText(myGroup, "TC_Input_Type");
//                myGroup.findViewsWithText(inputListView, "TC_Input_Type", View.FIND_VIEWS_WITH_TEXT);
                Log.e(TAG, "TC_Input_Type size== " + inputListView.size());
                sleep(2000);
                if(inputListView.size()!=0){
                    final WXTextView inputTypeView = (WXTextView)inputListView.get(0);
                    mInstrumentation.runOnMainSync(new Runnable() {
                        @Override
                        public void run() {
                            inputTypeView.requestFocus();
                            inputTypeView.performClick();
                            Log.e(TAG, "inputTypeView clcik!");
//                            screenShot("TC_Input_Type");
                        }
                    });
                    sleep(3000);
                    Log.e(TAG, "TC_Input_Type snap!");
                    screenShot("TC_Input_Type");
                }
            }
        }
    }

    /**
     * get tc list by text
     * @param byText text to search for; returns null for empty input
     * @return the views under the container whose text matches
     * @throws InterruptedException
     */
    public ArrayList<View> getTestCaseListViewByText(String byText) throws InterruptedException {
        Log.e("TestScript_Guide", "byText ==" + byText);
        if(TextUtils.isEmpty(byText)){
            return null;
        }
        ArrayList<View> outViews = new ArrayList<View>();
        mViewGroup.findViewsWithText(outViews, byText, View.FIND_VIEWS_WITH_TEXT);
        for (View view : outViews){
            String viewText = ((WXTextView)view).getText().toString();
            Log.e(TAG, "viewText ==" + viewText);
        }
        return outViews;
    }

    /**
     * findMyCaseByText: returns the first case view whose text contains
     * caseText (case-insensitive), or null when there is no match.
     */
    public View findMyCaseByText(String caseText){
        if (mCaseListIndexView.size() == 0) return null;
        for(int i=0; i<mCaseListIndexView.size();i++){
            WXTextView view = (WXTextView)mCaseListIndexView.get(i);
            if (view.getText().toString().toLowerCase().contains(caseText.toLowerCase())){
                return view;
            }
        }
        // BUG FIX: previously fell through and returned the LAST inspected
        // (non-matching) view; callers would then click an unrelated case.
        return null;
    }

    /**
     * sleep, swallowing interruption (best-effort pause in a UI test).
     */
    public void sleep(long time){
        try {
            Thread.sleep(time);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    /**
     * snapshot of the current weex page activity.
     */
    public void screenShot(String shotName) {
        try {
            ScreenShot.shoot(WXPageActivity.wxPageActivityInstance, shotName);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public void setViewGroup(ViewGroup viewGroup){
        mViewGroup = viewGroup;
    }

    /**
     * Scrolls the case list up a fixed number of times and accumulates every
     * view matching the target text into mCaseListIndexView.
     */
    public void addAllTargetView(String target){
        int max = 6;
        int count = 0;
        while (count < max){
            TouchUtils.dragQuarterScreenUp(this, this.getActivity());
            mViewGroup = (ViewGroup) waTestPageActivity.findViewById(R.id.container);
            // BUG FIX: the original reassigned mCaseListIndexView to the
            // current page's matches and then called addAll on itself, which
            // duplicated those matches and discarded everything collected on
            // previous scrolls. Accumulate into the existing list instead.
            ArrayList<View> found = ViewUtil.findViewWithText(mViewGroup, target);
            mCaseListIndexView.addAll(found);
            count++;
        }
    }
}
/*
 * Copyright (c) 2007, 2016, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */
/*
 * Copyright 2001-2004 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * $Id: RealType.java,v 1.2.4.1 2005/09/05 11:28:45 pvedula Exp $
 */

package com.sun.org.apache.xalan.internal.xsltc.compiler.util;

import com.sun.org.apache.bcel.internal.generic.BranchHandle;
import com.sun.org.apache.bcel.internal.generic.CHECKCAST;
import com.sun.org.apache.bcel.internal.generic.ConstantPoolGen;
import com.sun.org.apache.bcel.internal.generic.DLOAD;
import com.sun.org.apache.bcel.internal.generic.DSTORE;
import com.sun.org.apache.bcel.internal.generic.GOTO;
import com.sun.org.apache.bcel.internal.generic.IFEQ;
import com.sun.org.apache.bcel.internal.generic.IFNE;
import com.sun.org.apache.bcel.internal.generic.INVOKESPECIAL;
import com.sun.org.apache.bcel.internal.generic.INVOKESTATIC;
import com.sun.org.apache.bcel.internal.generic.INVOKEVIRTUAL;
import com.sun.org.apache.bcel.internal.generic.Instruction;
import com.sun.org.apache.bcel.internal.generic.InstructionConstants;
import com.sun.org.apache.bcel.internal.generic.InstructionList;
import com.sun.org.apache.bcel.internal.generic.LocalVariableGen;
import com.sun.org.apache.bcel.internal.generic.NEW;
import com.sun.org.apache.xalan.internal.xsltc.compiler.Constants;
import com.sun.org.apache.xalan.internal.xsltc.compiler.FlowList;

/**
 * XSLT compiler type representing an XPath number; backed by the JVM
 * primitive {@code double} (signature "D", two stack slots).
 *
 * @author Jacek Ambroziak
 * @author Santiago Pericas-Geertsen
 */
public final class RealType extends NumberType {
    protected RealType() {}

    public String toString() {
        return "real";
    }

    public boolean identicalTo(Type other) {
        // Types are singletons, so identity comparison suffices.
        return this == other;
    }

    public String toSignature() {
        // JVM descriptor for double.
        return "D";
    }

    public com.sun.org.apache.bcel.internal.generic.Type toJCType() {
        return com.sun.org.apache.bcel.internal.generic.Type.DOUBLE;
    }

    /**
     * Conversion-cost metric: 0 to itself, 1 to Int, unreachable otherwise.
     *
     * @see Type#distanceTo
     */
    public int distanceTo(Type type) {
        if (type == this) {
            return 0;
        }
        else if (type == Type.Int) {
            return 1;
        }
        else {
            return Integer.MAX_VALUE;
        }
    }

    /**
     * Translates a real into an object of internal type <code>type</code>. The
     * translation to int is undefined since reals are never converted to ints.
     *
     * Dispatches to the overload matching the concrete target type; any other
     * target is a fatal data-conversion error.
     *
     * @see Type#translateTo
     */
    public void translateTo(ClassGenerator classGen, MethodGenerator methodGen,
                            Type type) {
        if (type == Type.String) {
            translateTo(classGen, methodGen, (StringType) type);
        }
        else if (type == Type.Boolean) {
            translateTo(classGen, methodGen, (BooleanType) type);
        }
        else if (type == Type.Reference) {
            translateTo(classGen, methodGen, (ReferenceType) type);
        }
        else if (type == Type.Int) {
            translateTo(classGen, methodGen, (IntType) type);
        }
        else {
            ErrorMsg err = new ErrorMsg(ErrorMsg.DATA_CONVERSION_ERR,
                                        toString(), type.toString());
            classGen.getParser().reportError(Constants.FATAL, err);
        }
    }

    /**
     * Expects a real on the stack and pushes its string value by calling
     * <code>Double.toString(double d)</code>.
     *
     * @see Type#translateTo
     */
    public void translateTo(ClassGenerator classGen, MethodGenerator methodGen,
                            StringType type) {
        final ConstantPoolGen cpg = classGen.getConstantPool();
        final InstructionList il = methodGen.getInstructionList();
        // Delegates XPath number-to-string formatting to the runtime library.
        il.append(new INVOKESTATIC(cpg.addMethodref(BASIS_LIBRARY_CLASS,
                                                    "realToString",
                                                    "(D)" + STRING_SIG)));
    }

    /**
     * Expects a real on the stack and pushes a 0 if that number is 0.0 and
     * a 1 otherwise.
     *
     * @see Type#translateTo
     */
    public void translateTo(ClassGenerator classGen, MethodGenerator methodGen,
                            BooleanType type) {
        final InstructionList il = methodGen.getInstructionList();
        // Emit the comparison; falsel holds the branches that must jump to
        // the "false" outcome.
        FlowList falsel = translateToDesynthesized(classGen, methodGen, type);
        il.append(ICONST_1);
        final BranchHandle truec = il.append(new GOTO(null));
        falsel.backPatch(il.append(ICONST_0));
        truec.setTarget(il.append(NOP));
    }

    /**
     * Expects a real on the stack and pushes a truncated integer value
     *
     * @see Type#translateTo
     */
    public void translateTo(ClassGenerator classGen, MethodGenerator methodGen,
                            IntType type) {
        final ConstantPoolGen cpg = classGen.getConstantPool();
        final InstructionList il = methodGen.getInstructionList();
        // Runtime helper performs the XPath-conformant double-to-int
        // truncation (rather than a bare D2I).
        il.append(new INVOKESTATIC(cpg.addMethodref(BASIS_LIBRARY_CLASS,
                                                    "realToInt","(D)I")));
    }

    /**
     * Translates a real into a non-synthesized boolean. It does not push a
     * 0 or a 1 but instead returns branchhandle list to be appended to the
     * false list. A NaN must be converted to "false".
     *
     * @see Type#translateToDesynthesized
     */
    public FlowList translateToDesynthesized(ClassGenerator classGen,
                                             MethodGenerator methodGen,
                                             BooleanType type) {
        LocalVariableGen local;
        final FlowList flowlist = new FlowList();
        final ConstantPoolGen cpg = classGen.getConstantPool();
        final InstructionList il = methodGen.getInstructionList();

        // Store real into a local variable
        // (DUP2 first: a double occupies two stack slots, and we need the
        // value again for the NaN self-comparison below.)
        il.append(DUP2);
        local = methodGen.addLocalVariable("real_to_boolean_tmp",
                                           com.sun.org.apache.bcel.internal.generic.Type.DOUBLE,
                                           null, null);
        local.setStart(il.append(new DSTORE(local.getIndex())));

        // Compare it to 0.0
        il.append(DCONST_0);
        il.append(DCMPG);
        flowlist.add(il.append(new IFEQ(null)));

        //!!! call isNaN
        // Compare it to itself to see if NaN
        il.append(new DLOAD(local.getIndex()));
        local.setEnd(il.append(new DLOAD(local.getIndex())));
        il.append(DCMPG);
        flowlist.add(il.append(new IFNE(null))); // NaN != NaN
        return flowlist;
    }

    /**
     * Expects a double on the stack and pushes a boxed double. Boxed
     * double are represented by an instance of <code>java.lang.Double</code>.
     *
     * @see Type#translateTo
     */
    public void translateTo(ClassGenerator classGen, MethodGenerator methodGen,
                            ReferenceType type) {
        final ConstantPoolGen cpg = classGen.getConstantPool();
        final InstructionList il = methodGen.getInstructionList();
        il.append(new NEW(cpg.addClass(DOUBLE_CLASS)));
        // Stack shuffle to arrange (ref, ref, double) for the constructor
        // call: the double (two slots) was below the new reference.
        il.append(DUP_X2);
        il.append(DUP_X2);
        il.append(POP);
        il.append(new INVOKESPECIAL(cpg.addMethodref(DOUBLE_CLASS,
                                                     "<init>", "(D)V")));
    }

    /**
     * Translates a real into the Java type denoted by <code>clazz</code>.
     * Expects a real on the stack and pushes a number of the appropriate
     * type after coercion.
     */
    public void translateTo(ClassGenerator classGen, MethodGenerator methodGen,
                            final Class clazz) {
        final InstructionList il = methodGen.getInstructionList();
        if (clazz == Character.TYPE) {
            il.append(D2I);
            il.append(I2C);
        }
        else if (clazz == Byte.TYPE) {
            il.append(D2I);
            il.append(I2B);
        }
        else if (clazz == Short.TYPE) {
            il.append(D2I);
            il.append(I2S);
        }
        else if (clazz == Integer.TYPE) {
            il.append(D2I);
        }
        else if (clazz == Long.TYPE) {
            il.append(D2L);
        }
        else if (clazz == Float.TYPE) {
            il.append(D2F);
        }
        else if (clazz == Double.TYPE) {
            il.append(NOP);
        }
        // Is Double <: clazz? I.e. clazz in { Double, Number, Object }
        else if (clazz.isAssignableFrom(Double.class)) {
            translateTo(classGen, methodGen, Type.Reference);
        }
        else {
            ErrorMsg err = new ErrorMsg(ErrorMsg.DATA_CONVERSION_ERR,
                                        toString(), clazz.getName());
            classGen.getParser().reportError(Constants.FATAL, err);
        }
    }

    /**
     * Translates an external (primitive) Java type into a real. Expects a java
     * object on the stack and pushes a real (i.e., a double).
     */
    public void translateFrom(ClassGenerator classGen, MethodGenerator methodGen,
                              Class clazz) {
        InstructionList il = methodGen.getInstructionList();
        if (clazz == Character.TYPE || clazz == Byte.TYPE ||
            clazz == Short.TYPE || clazz == Integer.TYPE) {
            // All sub-int integrals widen via the int-to-double conversion.
            il.append(I2D);
        }
        else if (clazz == Long.TYPE) {
            il.append(L2D);
        }
        else if (clazz == Float.TYPE) {
            il.append(F2D);
        }
        else if (clazz == Double.TYPE) {
            il.append(NOP);
        }
        else {
            ErrorMsg err = new ErrorMsg(ErrorMsg.DATA_CONVERSION_ERR,
                                        toString(), clazz.getName());
            classGen.getParser().reportError(Constants.FATAL, err);
        }
    }

    /**
     * Translates an object of this type to its boxed representation.
     */
    public void translateBox(ClassGenerator classGen,
                             MethodGenerator methodGen) {
        translateTo(classGen, methodGen, Type.Reference);
    }

    /**
     * Translates an object of this type to its unboxed representation.
     */
    public void translateUnBox(ClassGenerator classGen,
                               MethodGenerator methodGen) {
        final ConstantPoolGen cpg = classGen.getConstantPool();
        final InstructionList il = methodGen.getInstructionList();
        // Cast the reference to java.lang.Double, then call doubleValue().
        il.append(new CHECKCAST(cpg.addClass(DOUBLE_CLASS)));
        il.append(new INVOKEVIRTUAL(cpg.addMethodref(DOUBLE_CLASS,
                                                     DOUBLE_VALUE,
                                                     DOUBLE_VALUE_SIG)));
    }

    // Arithmetic/stack instruction factories for double operands. A double
    // takes two slots, hence POP2/DUP2 instead of POP/DUP.
    public Instruction ADD() {
        return InstructionConstants.DADD;
    }

    public Instruction SUB() {
        return InstructionConstants.DSUB;
    }

    public Instruction MUL() {
        return InstructionConstants.DMUL;
    }

    public Instruction DIV() {
        return InstructionConstants.DDIV;
    }

    public Instruction REM() {
        return InstructionConstants.DREM;
    }

    public Instruction NEG() {
        return InstructionConstants.DNEG;
    }

    public Instruction LOAD(int slot) {
        return new DLOAD(slot);
    }

    public Instruction STORE(int slot) {
        return new DSTORE(slot);
    }

    public Instruction POP() {
        return POP2;
    }

    // DCMPG vs DCMPL differ only in how NaN is ordered; pick per direction.
    public Instruction CMP(boolean less) {
        return less ? InstructionConstants.DCMPG : InstructionConstants.DCMPL;
    }

    public Instruction DUP() {
        return DUP2;
    }
}
/*- * #%L * PropertiesFramework :: Core * %% * Copyright (C) 2017 LeanFrameworks * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. * #L% */ package com.github.leanframeworks.propertiesframework.base.property.simple; import com.github.leanframeworks.propertiesframework.api.property.ListPropertyChangeListener; import com.github.leanframeworks.propertiesframework.base.property.AbstractReadableWritableListProperty; import com.github.leanframeworks.propertiesframework.base.utils.ValueUtils; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.ListIterator; /** * Readable/writable list property backed by a {@link List}. 
* * @param <T> Type of values handled by this list property and the proxied list. */ public class SimpleListProperty<T> extends AbstractReadableWritableListProperty<T> implements List<T> { /** * Proxied list. */ private final List<T> internal = new ArrayList<>(); /** * Read-only version of the proxied list. */ private final List<T> unmodifiable = Collections.unmodifiableList(internal); /** * Constructor. */ public SimpleListProperty() { super(); } /** * Constructor adding the specified listeners. * * @param listeners Listeners to be added. */ @SafeVarargs public SimpleListProperty(ListPropertyChangeListener<T>... listeners) { super(listeners); } /** * Constructor specifying the initial items. * * @param items Initial items. */ public SimpleListProperty(List<T> items) { super(); internal.addAll(items); } /** * Constructor specifying the initial items and adding the specified listeners. * <p> * Note that the specified listeners will not be notified for the addition of the specified initial items. * * @param items Initial items. * @param listeners Listeners to be added. */ @SafeVarargs public SimpleListProperty(List<T> items, ListPropertyChangeListener<T>... 
listeners) { super(); // Without listeners internal.addAll(items); for (ListPropertyChangeListener<T> listener : listeners) { addChangeListener(listener); } } /** * @see AbstractReadableWritableListProperty#size() * @see List#size() */ @Override public int size() { return internal.size(); } /** * @see AbstractReadableWritableListProperty#isEmpty() * @see List#isEmpty() */ @Override public boolean isEmpty() { return internal.isEmpty(); } /** * @see AbstractReadableWritableListProperty#get(int) * @see List#get(int) */ @Override public T get(int index) { return internal.get(index); } /** * @see AbstractReadableWritableListProperty#set(int, Object) * @see List#set(int, Object) */ @Override public T set(int index, T item) { T oldItem = internal.set(index, item); if (!ValueUtils.areEqual(oldItem, item)) { List<T> oldItems = Collections.unmodifiableList(Collections.singletonList(oldItem)); List<T> newItems = Collections.unmodifiableList(Collections.singletonList(oldItem)); doNotifyListenersOfChangedValues(index, oldItems, newItems); } return oldItem; } /** * @see AbstractReadableWritableListProperty#add(Object) * @see List#add(Object) */ @Override public boolean add(T item) { boolean added = internal.add(item); if (added) { doNotifyListenersOfAddedValues(internal.size() - 1, Collections.unmodifiableList(Collections.singletonList(item))); } return added; } /** * @see AbstractReadableWritableListProperty#add(int, Object) * @see List#add(int, Object) */ @Override public void add(int index, T item) { internal.add(index, item); doNotifyListenersOfAddedValues(index, Collections.singletonList(item)); } /** * @see AbstractReadableWritableListProperty#remove(Object) * @see List#remove(Object) */ @Override public boolean remove(Object item) { int index = internal.indexOf(item); if (index >= 0) { remove(index); } return index >= 0; } /** * @see AbstractReadableWritableListProperty#remove(int) * @see List#remove(int) */ @Override public T remove(int index) { T oldItem = 
internal.remove(index); doNotifyListenersOfRemovedValues(index, Collections.singletonList(oldItem)); return oldItem; } /** * @see AbstractReadableWritableListProperty#addAll(Collection) * @see List#addAll(Collection) */ @Override public boolean addAll(Collection<? extends T> items) { int firstIndex = internal.size(); boolean added = internal.addAll(items); doNotifyListenersOfAddedValues(firstIndex, new ArrayList<T>(items)); return added; } /** * @see AbstractReadableWritableListProperty#addAll(int, Collection) * @see List#addAll(int, Collection) */ @Override public boolean addAll(int index, Collection<? extends T> items) { boolean added = internal.addAll(index, items); doNotifyListenersOfAddedValues(index, new ArrayList<T>(items)); return added; } /** * @see AbstractReadableWritableListProperty#removeAll(Collection) * @see List#removeAll(Collection) */ @Override public boolean removeAll(Collection<?> items) { boolean removed = false; for (Object item : items) { removed |= remove(item); } return removed; } /** * @see AbstractReadableWritableListProperty#retainAll(Collection) * @see List#retainAll(Collection) */ @Override public boolean retainAll(Collection<?> items) { Collection<T> toBeRemoved = new ArrayList<>(); for (T item : internal) { if (!items.contains(item)) { toBeRemoved.add(item); } } return removeAll(toBeRemoved); } /** * @see AbstractReadableWritableListProperty#clear() * @see List#clear() */ @Override public void clear() { if (!internal.isEmpty()) { List<T> removed = new ArrayList<>(internal); internal.clear(); doNotifyListenersOfRemovedValues(0, removed); } } /** * @see AbstractReadableWritableListProperty#contains(Object) * @see List#contains(Object) */ @Override public boolean contains(Object item) { return internal.contains(item); } /** * @see AbstractReadableWritableListProperty#containsAll(Collection) * @see List#containsAll(Collection) */ @Override public boolean containsAll(Collection<?> items) { return internal.containsAll(items); } /** * @see 
List#indexOf(Object) */ @Override public int indexOf(Object item) { return internal.indexOf(item); } /** * @see List#lastIndexOf(Object) */ @Override public int lastIndexOf(Object item) { return internal.lastIndexOf(item); } /** * @see List#toArray() */ @Override public Object[] toArray() { return internal.toArray(); } /** * @see List#toArray(Object[]) */ @Override public <U> U[] toArray(U[] a) { return internal.toArray(a); } /** * @see AbstractReadableWritableListProperty#iterator() * @see List#iterator() */ @Override public Iterator<T> iterator() { return unmodifiable.iterator(); } /** * @see List#listIterator() */ @Override public ListIterator<T> listIterator() { return unmodifiable.listIterator(); } /** * @see List#listIterator(int) */ @Override public ListIterator<T> listIterator(int index) { return unmodifiable.listIterator(index); } /** * @see List#subList(int, int) */ @Override public List<T> subList(int fromIndex, int toIndex) { return unmodifiable.subList(fromIndex, toIndex); } /** * @see AbstractReadableWritableListProperty#asUnmodifiableList() */ @Override public List<T> asUnmodifiableList() { return unmodifiable; } }
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.tools.idea.tests.gui.framework.fixture; import com.google.common.base.Joiner; import com.google.common.base.Splitter; import com.intellij.ide.errorTreeView.*; import com.intellij.openapi.externalSystem.service.notification.EditableNotificationMessageElement; import com.intellij.openapi.externalSystem.service.notification.NotificationMessageElement; import com.intellij.openapi.fileEditor.OpenFileDescriptor; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Pair; import com.intellij.openapi.wm.ToolWindowId; import com.intellij.pom.Navigatable; import com.intellij.ui.content.Content; import org.fest.swing.core.Robot; import org.fest.swing.edt.GuiQuery; import org.fest.swing.edt.GuiTask; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.event.HyperlinkEvent; import javax.swing.tree.TreeCellEditor; import java.io.File; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import static com.intellij.openapi.vfs.VfsUtilCore.virtualToIoFile; import static javax.swing.event.HyperlinkEvent.EventType.ACTIVATED; import static junit.framework.Assert.assertNotNull; import static org.fest.assertions.Assertions.assertThat; import static org.fest.reflect.core.Reflection.field; import static org.fest.swing.awt.AWT.visibleCenterOf; import 
static org.fest.swing.edt.GuiActionRunner.execute;
import static org.fest.util.Strings.quote;

/**
 * UI-test fixture for the "Messages" tool window. Provides access to the "Gradle Sync" and
 * "Gradle Build" content tabs and helpers for locating messages and clicking embedded hyperlinks.
 */
public class MessagesToolWindowFixture extends ToolWindowFixture {
  MessagesToolWindowFixture(@NotNull Project project, @NotNull Robot robot) {
    super(ToolWindowId.MESSAGES_WINDOW, project, robot);
  }

  /** Returns a fixture for the "Gradle Sync" tab; fails if the tab is not present. */
  @NotNull
  public ContentFixture getGradleSyncContent() {
    Content content = getContent("Gradle Sync");
    assertNotNull(content);
    return new SyncContentFixture(content);
  }

  /** Returns a fixture for the "Gradle Build" tab; fails if the tab is not present. */
  @NotNull
  public ContentFixture getGradleBuildContent() {
    Content content = getContent("Gradle Build");
    assertNotNull(content);
    return new BuildContentFixture(content);
  }

  /** Base fixture for one content tab; finds messages in the tab's error tree. */
  public abstract static class ContentFixture {
    @NotNull private final Content myContent;

    private ContentFixture(@NotNull Content content) {
      myContent = content;
    }

    /**
     * Finds the first message of the given kind whose text contains {@code text} on any line.
     * Fails the test if no such message exists.
     */
    @NotNull
    public MessageFixture findMessageContainingText(@NotNull ErrorTreeElementKind kind, @NotNull final String text) {
      ErrorTreeElement element = doFindMessage(kind, new MessageMatcher() {
        @Override
        protected boolean matches(@NotNull String[] lines) {
          for (String s : lines) {
            if (s.contains(text)) {
              return true;
            }
          }
          return false;
        }
      });
      return createFixture(element);
    }

    /** Finds the first message of the given kind accepted by {@code matcher}; fails if absent. */
    @NotNull
    public MessageFixture findMessage(@NotNull ErrorTreeElementKind kind, @NotNull MessageMatcher matcher) {
      ErrorTreeElement found = doFindMessage(kind, matcher);
      return createFixture(found);
    }

    @NotNull
    protected abstract MessageFixture createFixture(@NotNull ErrorTreeElement element);

    @NotNull
    private ErrorTreeElement doFindMessage(@NotNull final ErrorTreeElementKind kind, @NotNull final MessageMatcher matcher) {
      // The error tree must be read on the EDT.
      ErrorTreeElement found = execute(new GuiQuery<ErrorTreeElement>() {
        @Override
        @Nullable
        protected ErrorTreeElement executeInEDT() throws Throwable {
          NewErrorTreeViewPanel component = (NewErrorTreeViewPanel)myContent.getComponent();
          ErrorViewStructure errorView = component.getErrorViewStructure();
          Object root = errorView.getRootElement();
          return findMessage(errorView, errorView.getChildElements(root), matcher, kind);
        }
      });
      assertNotNull(String.format("Failed to find message of type %1$s and matching text %2$s", kind, matcher.toString()), found);
      return found;
    }

    /** Depth-first search through grouping elements for a matching message element. */
    @Nullable
    private static ErrorTreeElement findMessage(@NotNull ErrorViewStructure errorView,
                                                @NotNull ErrorTreeElement[] children,
                                                @NotNull MessageMatcher matcher,
                                                @NotNull ErrorTreeElementKind kind) {
      for (ErrorTreeElement child : children) {
        if (child instanceof GroupingElement) {
          ErrorTreeElement found = findMessage(errorView, errorView.getChildElements(child), matcher, kind);
          if (found != null) {
            return found;
          }
        }
        if (kind == child.getKind() && matcher.matches(child.getText())) {
          return child;
        }
      }
      return null;
    }
  }

  /** Predicate over a message's text lines. */
  public static abstract class MessageMatcher {
    protected abstract boolean matches(@NotNull String[] text);

    /** Matcher accepting messages whose first line starts with {@code prefix}. */
    @NotNull
    public static MessageMatcher firstLineStartingWith(@NotNull final String prefix) {
      return new MessageMatcher() {
        @Override
        public boolean matches(@NotNull String[] text) {
          assertThat(text).isNotEmpty();
          return text[0].startsWith(prefix);
        }

        @Override
        public String toString() {
          return "first line starting with " + quote(prefix);
        }
      };
    }
  }

  /** Content fixture for the "Gradle Sync" tab. */
  public class SyncContentFixture extends ContentFixture {
    SyncContentFixture(@NotNull Content content) {
      super(content);
    }

    @Override
    @NotNull
    protected MessageFixture createFixture(@NotNull ErrorTreeElement element) {
      return new SyncMessageFixture(myRobot, element);
    }
  }

  /** Content fixture for the "Gradle Build" tab. Message fixtures are not supported yet. */
  public class BuildContentFixture extends ContentFixture {
    BuildContentFixture(@NotNull Content content) {
      super(content);
    }

    @Override
    @NotNull
    protected MessageFixture createFixture(@NotNull ErrorTreeElement element) {
      throw new UnsupportedOperationException();
    }
  }

  /** Fixture wrapping a single message element in the tree. */
  public abstract static class MessageFixture {
    // Matches HTML anchors of the form <a href="URL">text</a>.
    private static final Pattern ANCHOR_TAG_PATTERN = Pattern.compile("<a href=\"(.*?)\">([^<]+)</a>");

    @NotNull protected final Robot myRobot;
    @NotNull protected final ErrorTreeElement myTarget;

    protected MessageFixture(@NotNull Robot robot, @NotNull ErrorTreeElement target) {
      myRobot = robot;
      myTarget = target;
    }

    @NotNull
    public abstract HyperlinkFixture findHyperlink(@NotNull String hyperlinkText);

    /**
     * Extracts the URL of the anchor whose (whitespace-normalized) text equals
     * {@code hyperlinkText} from the message's HTML. Fails the test if no such anchor exists.
     */
    @NotNull
    protected String extractUrl(@NotNull String wholeText, @NotNull String hyperlinkText) {
      String url = null;
      Matcher matcher = ANCHOR_TAG_PATTERN.matcher(wholeText);
      while (matcher.find()) {
        String anchorText = matcher.group(2);
        // Text may be spread across multiple lines. Put everything in one line.
        if (anchorText != null) {
          anchorText = anchorText.replaceAll("[\\s]+", " ");
          if (anchorText.equals(hyperlinkText)) {
            url = matcher.group(1);
            break;
          }
        }
      }
      assertNotNull("Failed to find URL for hyperlink " + quote(hyperlinkText), url);
      return url;
    }

    /** Asserts the message navigates to {@code filePath} at 1-based line {@code line}. */
    @NotNull
    public MessageFixture requireLocation(@NotNull File filePath, int line) {
      doRequireLocation(filePath, line);
      return this;
    }

    protected void doRequireLocation(@NotNull File expectedFilePath, int line) {
      assertThat(myTarget).isInstanceOf(NotificationMessageElement.class);
      NotificationMessageElement element = (NotificationMessageElement)myTarget;
      Navigatable navigatable = element.getNavigatable();
      assertThat(navigatable).isInstanceOf(OpenFileDescriptor.class);
      OpenFileDescriptor descriptor = (OpenFileDescriptor)navigatable;
      File actualFilePath = virtualToIoFile(descriptor.getFile());
      assertThat(actualFilePath).isEqualTo(expectedFilePath);
      assertThat((descriptor.getLine() + 1)).as("line").isEqualTo(line); // descriptor line is zero-based.
    }

    @NotNull
    public abstract String getText();
  }

  /** Message fixture for Gradle-sync messages, backed by an editable HTML cell. */
  public static class SyncMessageFixture extends MessageFixture {
    SyncMessageFixture(@NotNull Robot robot, @NotNull ErrorTreeElement target) {
      super(robot, target);
    }

    @Override
    @NotNull
    public HyperlinkFixture findHyperlink(@NotNull String hyperlinkText) {
      Pair<JEditorPane, String> cellEditorAndText = getCellEditorAndText();
      String url = extractUrl(cellEditorAndText.getSecond(), hyperlinkText);
      return new SyncHyperlinkFixture(myRobot, url, cellEditorAndText.getFirst());
    }

    @Override
    @NotNull
    public String getText() {
      String html = getCellEditorAndText().getSecond();
      int startBodyIndex = html.indexOf("<body>");
      assertThat(startBodyIndex).isGreaterThanOrEqualTo(0);
      int endBodyIndex = html.indexOf("</body>");
      assertThat(endBodyIndex).isGreaterThan(startBodyIndex);
      String body = html.substring(startBodyIndex + 6 /* 6 = length of '<body>' */, endBodyIndex);
      // Collapse the HTML body onto a single line.
      List<String> lines = Splitter.on('\n').omitEmptyStrings().trimResults().splitToList(body);
      body = Joiner.on(' ').join(lines);
      return body;
    }

    @NotNull
    private Pair<JEditorPane, String> getCellEditorAndText() {
      // There is no specific UI component for a hyperlink in the "Messages" window. Instead we have a JEditorPane with HTML. This method
      // finds the anchor tags, and matches the text of each of them against the given text. If a matching hyperlink is found, we fire a
      // HyperlinkEvent, simulating a click on the actual hyperlink.
      assertThat(myTarget).isInstanceOf(EditableNotificationMessageElement.class);
      final JEditorPane editorComponent = execute(new GuiQuery<JEditorPane>() {
        @Override
        protected JEditorPane executeInEDT() throws Throwable {
          EditableNotificationMessageElement message = (EditableNotificationMessageElement)myTarget;
          TreeCellEditor cellEditor = message.getRightSelfEditor();
          return field("editorComponent").ofType(JEditorPane.class).in(cellEditor).get();
        }
      });
      assertNotNull(editorComponent);
      String text = execute(new GuiQuery<String>() {
        @Override
        protected String executeInEDT() throws Throwable {
          return editorComponent.getText();
        }
      });
      assertNotNull(text);
      return Pair.create(editorComponent, text);
    }
  }

  /** Fixture for a hyperlink embedded in a message. */
  public abstract static class HyperlinkFixture {
    @NotNull protected final Robot myRobot;
    @NotNull protected final String myUrl;

    protected HyperlinkFixture(@NotNull Robot robot, @NotNull String url) {
      myRobot = robot;
      myUrl = url;
    }

    /** Asserts the hyperlink's URL equals {@code expected}. */
    @NotNull
    public HyperlinkFixture requireUrl(@NotNull String expected) {
      assertThat(myUrl).as("URL").isEqualTo(expected);
      return this;
    }

    /** Simulates a click on the hyperlink, waiting for the EDT task to complete. */
    @NotNull
    public HyperlinkFixture click() {
      click(true);
      return this;
    }

    /**
     * Simulates a click on the hyperlink. This method returns immediately and does not wait for any UI actions triggered by the click to be
     * finished.
     */
    public HyperlinkFixture clickAndContinue() {
      click(false);
      return this;
    }

    private void click(boolean synchronous) {
      if (synchronous) {
        execute(new GuiTask() {
          @Override
          protected void executeInEDT() {
            // FIX: previously wrapped in an anonymous Runnable that was immediately
            // run() on the same thread — a no-op indirection; call doClick() directly.
            doClick();
          }
        });
      }
      else {
        //noinspection SSBasedInspection
        SwingUtilities.invokeLater(new Runnable() {
          @Override
          public void run() {
            doClick();
          }
        });
      }
    }

    protected abstract void doClick();
  }

  /** Hyperlink fixture that fires a HyperlinkEvent on the message's JEditorPane. */
  public static class SyncHyperlinkFixture extends HyperlinkFixture {
    @NotNull private final JEditorPane myTarget;

    SyncHyperlinkFixture(@NotNull Robot robot, @NotNull String url, @NotNull JEditorPane target) {
      super(robot, url);
      myTarget = target;
    }

    @Override
    protected void doClick() {
      // at least move the mouse where the message is, so we can know that something is happening.
      myRobot.moveMouse(visibleCenterOf(myTarget));
      myTarget.fireHyperlinkUpdate(new HyperlinkEvent(this, ACTIVATED, null, myUrl));
    }
  }
}
/*L * Copyright SAIC, SAIC-Frederick. * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/caadapter/LICENSE.txt for details. */ package gov.nih.nci.caadapter.mms.generator; import gov.nih.nci.caadapter.mms.map.AttributeMapping; import gov.nih.nci.caadapter.mms.map.CumulativeMapping; import gov.nih.nci.caadapter.mms.map.DependencyMapping; import gov.nih.nci.caadapter.mms.map.AssociationMapping; import gov.nih.nci.caadapter.common.MetaObject; import gov.nih.nci.caadapter.common.metadata.AssociationMetadata; import gov.nih.nci.caadapter.common.metadata.AttributeMetadata; import gov.nih.nci.caadapter.common.metadata.ColumnMetadata; import gov.nih.nci.caadapter.common.metadata.ModelMetadata; import gov.nih.nci.caadapter.common.metadata.ObjectMetadata; import gov.nih.nci.caadapter.common.metadata.TableMetadata; import gov.nih.nci.caadapter.common.util.Iso21090Util; import gov.nih.nci.caadapter.mms.validator.AttributeMappingValidator; import gov.nih.nci.caadapter.mms.validator.DependencyMappingValidator; import gov.nih.nci.caadapter.mms.validator.SingleAssociationMappingValidator; import gov.nih.nci.caadapter.ui.common.Iso21090uiUtil; import gov.nih.nci.ncicb.xmiinout.domain.*; import gov.nih.nci.ncicb.xmiinout.util.ModelUtil; import java.util.LinkedHashMap; import java.util.List; /** * The purpose of this class is to create and maintain a CumulativeMapping * object. As a caAdapter user drags and drops a source to a target in the UI * the system will first determine what type of mapping the user is attempting * to create then it will determine if the mapping is valid based on various business rules. * If the mapping is found to be valid it will add it to the CumulativeMapping * object as either a DependencyMapping, AttributeMapping, SingleAssociationMapping, or ManyToManyMapping object. 
* * @author OWNER: connellm * @author LAST UPDATE $Author: wangeug $ * @since caAdatper v4.0 * @version $Revision: 1.19 $ * @date $Date: 2009-09-29 17:39:07 $ */ public class CumulativeMappingGenerator { private CumulativeMapping cumulativeMapping; private ModelMetadata metaModel = null; private String errorMessage; private static CumulativeMappingGenerator instance; private CumulativeMappingGenerator(String xmiName) throws Exception { metaModel = new ModelMetadata(xmiName); cumulativeMapping = new CumulativeMapping(); } /** * @param xmiFileName_local */ public static boolean init(String xmiFileName_local) { try { instance = new CumulativeMappingGenerator(xmiFileName_local); }catch (Exception e){ e.printStackTrace(); return false; } return true; } public static CumulativeMappingGenerator getInstance() { return instance; } /** * @return the metaModel */ public ModelMetadata getMetaModel() { return metaModel; } /** * @return CumulativeMapping */ public CumulativeMapping getCumulativeMapping() { return cumulativeMapping; } /** * @param cumulativeMapping */ public void setCumulativeMapping(CumulativeMapping cMapping) { cumulativeMapping = cMapping; } public String getErrorMessage() { return errorMessage; } public void setErrorMessage(String eMessage) { errorMessage = eMessage; } /** * This method would be used to remove a previously created source to target mapping. 
* @param source Source element to be unmapped * @param target Target element to be unmapped * @param annotationSite path the annotation element for ISO 21090 datatype annotation * @param relativePath relative path of the current element to the annotation element * @return boolean */ public boolean unmap(String source, String target, String annotationSite, String relativePath){ boolean successfullyUnmapped = false; String sourceMappingType = determineSourceMappingType(source); String targetMappingType = determineTargetMappingType(target); if (sourceMappingType.equals("dependency")&& targetMappingType .equals("dependency")){ UMLClass sourceClass = getClass(source); UMLClass targetClass = getClass(target); successfullyUnmapped = unmapDependency(sourceClass, source, targetClass, target); } else if (sourceMappingType .equals("attribute") && targetMappingType.equals("attribute")) { if (annotationSite==null||annotationSite.equals("")) successfullyUnmapped = unmapAttribute(source,relativePath, target); else successfullyUnmapped = unmapAttribute(annotationSite,relativePath, target); } else if (sourceMappingType.equals("association")&& targetMappingType.equals("attribute")) { if (annotationSite==null||annotationSite.equals("")) successfullyUnmapped = unmapAssociation(source,relativePath, target); else successfullyUnmapped = unmapAssociation(annotationSite,relativePath, target); // } else if (sourceMappingType.equals("manytomanyassociation")&& targetMappingType.equals("attribute")){ // successfullyUnmapped = unmapManyToManyAssociation(source, target); } else { setErrorMessage(source.substring(source.lastIndexOf(".")+1) + " to " + target.substring(target.lastIndexOf(".")+1) + " is invalid/not_supported"); } return successfullyUnmapped; } /** * @param source Source element to be mapped. * @param target Target element to be mapped. 
* @param annotationSite path the annotation element for ISO 21090 datatype annotation * @param relativePath relative path of the current element to the annotation element * @param updateModel If the underneath UML should be updated as creating a new mapping * @return boolean */ public boolean map(String source, String target, String annotationSite, String relativePath, boolean updateModel){ boolean successfullyMapped = false; // The first thing that needs to be determined is what type of mapping is being attempted, i.e. Dependency, Attribute, Associatin, etc. String sourceMappingType = determineSourceMappingType(source); String targetMappingType = determineTargetMappingType(target); // Then the source and target mapping types are compared. They must be the same for the process to continue. For instance, if an attempt is made // to map an Object (vs. attribute) to a column in a table the mapping attempt will fail. if (sourceMappingType.equals("dependency")&& targetMappingType .equals("dependency")){ //Then the actual components from the UML model are realized UMLClass sourceClass = getClass(source); UMLClass targetClass = getClass(target); successfullyMapped = mapDependency(sourceClass, source, targetClass, target, updateModel); } else if (sourceMappingType.equals("attribute") && targetMappingType.equals("attribute")) { //Then the actual components from the UML model are realized successfullyMapped = mapAttribute(source, target,annotationSite, relativePath, updateModel); if(!successfullyMapped) return successfullyMapped; if (!updateModel) return successfullyMapped; if (relativePath==null||relativePath.equals("")) return successfullyMapped; //additional work for ISO datatype -- map collection element with/without join table //case I: the ancestor annotation attribute is mapped // set both "mapped-collection-table" and "correlation-table" // -- no action //case II: the ancestor annotation attribute is not mapped // neither set "mapped-collection-table" nor 
"correlation-table" // -- no action //case III the ancestor annotation attribute is mapped // only set "mapped-collection-table" //case III.a. the attribute is mapped to the same target table // -- no action, return //case III.b. the attribute is mapped to a different table // set "correlation-table" with previous "mapped-collection-table" // and change "mapped-collection-table" value to current table name UMLAttribute annotationAttr=ModelUtil.findAttribute(metaModel.getModel(), annotationSite); if (annotationAttr.getTaggedValue("correlation-table")!=null) //Case I: return successfullyMapped; if (annotationAttr.getTaggedValue("mapped-collection-table")==null) //Case II: return successfullyMapped; //Case III-- only "mapped-collection-table" is set ColumnMetadata columnMeta=(ColumnMetadata)metaModel.getModelMetadata().get(target); String tblName=columnMeta.getTableMetadata().getName(); String tblCollectionName=annotationAttr.getTaggedValue("mapped-collection-table").getValue(); if (tblName.equals(tblCollectionName)) //Case III.a: return successfullyMapped; //Case III.b, set "correlation-table" and change "mapped-collection-table" XMIAnnotationUtil.addTagValue(annotationAttr, "correlation-table", tblCollectionName); XMIAnnotationUtil.addTagValue(annotationAttr, "mapped-collection-table", tblName); } else if (sourceMappingType.equals("association")&& targetMappingType.equals("attribute")) { //Then the actual components from the UML model are realized successfullyMapped = mapAssociation(source, target, annotationSite, relativePath, updateModel); if(!successfullyMapped) return successfullyMapped; if (!updateModel) return successfullyMapped; //additional work for ISO datatype -- map collection element with/without join table //case I: no child attribute being mapped -- no action return //case II: child attribute are mapped to the same target table -- no action since it is "mapped-collection-table" //case III: child attribute are mapped to the different table // set 
"correlation-table" with previous "mapped-collection-table" // and change "mapped-collection-table" value to table name of child mapping target. ColumnMetadata columnMeta=(ColumnMetadata)metaModel.getModelMetadata().get(target); List<MetaObject> allMappedTargetMeta=cumulativeMapping.findMappedSourceOrChild(source); if (allMappedTargetMeta==null||allMappedTargetMeta.isEmpty()) return successfullyMapped; ColumnMetadata mappedColumnInOtherTable=null; for (MetaObject oneTrgtMeta:allMappedTargetMeta) { if (oneTrgtMeta instanceof ColumnMetadata) { ColumnMetadata oneTrgtColumn=(ColumnMetadata)oneTrgtMeta; if (!oneTrgtColumn.getParentXPath().equals(columnMeta.getParentXPath())) { mappedColumnInOtherTable=oneTrgtColumn; break; } } } if (mappedColumnInOtherTable==null) //case I and II return successfullyMapped; //case III String otherTblPath=mappedColumnInOtherTable.getParentXPath(); TableMetadata otherTblMeta=(TableMetadata)metaModel.getModelMetadata().get(otherTblPath); UMLAttribute annotationAttr=ModelUtil.findAttribute(metaModel.getModel(), annotationSite); //"mapped-collection-table" just being set in mapAssociation() call //copy it to "correlation-table" XMIAnnotationUtil.addTagValue(annotationAttr, "correlation-table", annotationAttr.getTaggedValue("mapped-collection-table").getValue()); //change "mapped-collection-table" to the other table XMIAnnotationUtil.addTagValue(annotationAttr, "mapped-collection-table", otherTblMeta.getName()); // } else if (sourceMappingType.equals("manytomanyassociation")&& targetMappingType.equals("attribute")){ // //Then the actual components from the UML model are realized // UMLAssociationEnd sourceEnd = getAssociationEnd(source); // successfullyMapped = mapManyToManyAssociation(sourceEnd, source, target); }else { setErrorMessage(source.substring(source.lastIndexOf(".")+1) + " to " + target.substring(target.lastIndexOf(".")+1) + " is invalid/not_supported"); } return successfullyMapped; } /** * This method determines if the source in 
the mapping is an object, attribute or association * @param source * @return String sourceMappingType */ private String determineSourceMappingType(String source){ String mappingType = null; if (isClass(source)){ mappingType = "dependency"; } else if (isAttribute(source)){ mappingType = "attribute"; //} //TO_DO else if (isSingleAssociation(source)&& isManyToManyAssociation(source)){ } else mappingType="association"; return mappingType; } /** * This method first checks to see if the target is a table or attribute and what kind of table or attribute * the target is in order to determine what type of target mapping is being attempted. * @param target * @return String target mapping type */ private String determineTargetMappingType(String target){ String mappingType = "undefinedTarget"; //We need to determine if the target is a table for a dependency mapping or a many to many mapping. if (isClass(target) && !isCorrelationTable(target)){ mappingType = "dependency"; } else if (isAttribute(target)){ mappingType = "attribute"; } else if (isCorrelationTable(target)){ mappingType = "correlationtable"; } return mappingType; } //TO_DO many of these convenience methods could be moved to a utility class to facilitate reuse by //other classes. Didn't have time to do that, but thats what I would do. /** * This method uses the path to the object being mapped to retrieve the actual UMLClass entity from the UML model(xmi file) * @param pathToClass * @return UMLClass */ private UMLClass getClass(String pathToClass){ UMLClass clazz = null; String[] modelElements = pathToClass.split("\\."); clazz = findClass(metaModel.getModel(), modelElements, 0, modelElements.length-1); return clazz; } /** * This method creates a dependency mapping by extracting values from target and source UMLClasses as well as the paths to * those objects extracted originally from the xmi file. After creating and validating the mapping the * method adds the mapping to the cumulative mapping object. 
* @param source * @param sourceXPath * @param target * @param targetXPath * @param updateModel If the underneath UML should be updated as creating a new mapping * @return boolean */ private boolean mapDependency(UMLClass source, String sourceXPath, UMLClass target, String targetXPath, boolean updateModel){ boolean successfullyMapped = false; DependencyMapping mapping = new DependencyMapping(); ObjectMetadata sourceMetadata = new ObjectMetadata(); sourceMetadata.setName(source.getName()); sourceMetadata.setXPath(sourceXPath); TableMetadata targetMetadata = new TableMetadata(); targetMetadata.setName(target.getName()); targetMetadata.setXPath(targetXPath); mapping.setSourceDependency(sourceMetadata); mapping.setTargetDependency(targetMetadata); DependencyMappingValidator validator = new DependencyMappingValidator(mapping); successfullyMapped = validator.isValid(); if (successfullyMapped){ cumulativeMapping.addDependencyMapping(mapping); //add dependency to UMLModel if (updateModel) XMIAnnotationUtil.addDataObjectDependency(metaModel.getModel(), targetXPath, sourceXPath); } else { setErrorMessage(validator.getValidationErrorMessage()); } return successfullyMapped; } /** * This method removes a DependencyMapping object from the CumulativeMapping object. 
* @param source * @param sourceXPath * @param target * @param targetXPath * @return */ private boolean unmapDependency(UMLClass source, String sourceXPath, UMLClass target, String targetXPath){ List<DependencyMapping> dependencyMapping = cumulativeMapping.getDependencyMappings(); for (DependencyMapping d : dependencyMapping) { if (d.getSourceDependency().getXPath().equals(sourceXPath) && d.getTargetDependency().getXPath().equals(targetXPath)) { cumulativeMapping.removeDependencyMapping(d); //remove dependency from UMLModel return XMIAnnotationUtil.removeDataObjectDependency(metaModel.getModel(), sourceXPath); } } return false; } /** * @param sourcePath * @param targetPath * @param updateModel If the underneath UML should be updated as creating a new mapping * @return boolean */ private boolean mapAttribute(String sourcePath, String targetPath, String annotationPath, String relativePath, boolean updateModel){ LinkedHashMap modelMeta = metaModel.getModelMetadata(); AttributeMetadata attributeMetadata = (AttributeMetadata)modelMeta.get(annotationPath);//.get(sourcePath); ColumnMetadata columnMetadata = (ColumnMetadata)modelMeta.get(targetPath); boolean successfullyMapped = false; AttributeMapping mapping = new AttributeMapping(); columnMetadata.setType(columnMetadata.TYPE_ATTRIBUTE); mapping.setAttributeMetadata(attributeMetadata); mapping.setColumnMetadata(columnMetadata); AttributeMappingValidator validator = new AttributeMappingValidator(mapping); successfullyMapped = validator.isValid(); if (successfullyMapped) { if (relativePath==null||relativePath.equals("")) cumulativeMapping.addAttributeMapping(mapping, sourcePath); else cumulativeMapping.addAttributeMapping(mapping, annotationPath+"."+relativePath); if (updateModel) { UMLAttribute xpathUMLAttribute=ModelUtil.findAttribute(metaModel.getModel(),columnMetadata.getXPath()); //remove the leading string:"Logical View.Logical Model." 
from source path String pureSrcPath=""; if (relativePath==null|relativePath.equals("")) pureSrcPath=XMIAnnotationUtil.getCleanPath(metaModel.getMmsPrefixObjectModel(), sourcePath); else pureSrcPath=XMIAnnotationUtil.getCleanPath(metaModel.getMmsPrefixObjectModel(), annotationPath) +"."+relativePath; XMIAnnotationUtil.addTagValue(xpathUMLAttribute, "mapped-attributes", pureSrcPath); } } else { setErrorMessage(validator.getValidationErrorMessage()); } return successfullyMapped; } /** * @param annotationPath * @param relativePath * @param targetPath */ private boolean unmapAttribute(String annotationPath, String relativePath, String targetPath){ List<AttributeMapping> attributeMapping = cumulativeMapping.getAttributeMappings(); for (AttributeMapping attr : attributeMapping) { // if (attr.getAttributeMetadata().getXPath().equals(sourcePath) && attr.getColumnMetadata().getXPath().equals(targetPath)) { if (attr.getColumnMetadata().getXPath().equals(targetPath)) { if (relativePath==null|relativePath.equals("")) cumulativeMapping.removeAttributeMapping(attr, annotationPath); else cumulativeMapping.removeAttributeMapping(attr, annotationPath+"."+relativePath); //remove "mapped-attributes" tag from UMLModel UMLAttribute xpathAttr=ModelUtil.findAttribute(metaModel.getModel(),attr.getColumnMetadata().getXPath()); return XMIAnnotationUtil.removeTagValue(xpathAttr, "mapped-attributes"); } } return false; } /** * @param sourceXPath * @param targetXPath * @param updateModel If the underneath UML should be updated as creating a new mapping * @return boolean */ private boolean mapAssociation(String sourceXPath, String targetXPath, String annotationPath, String relativePath, boolean updateModel){ LinkedHashMap modelMeta = metaModel.getModelMetadata(); AssociationMetadata sourceMetadata =null; MetaObject metaSrc= (MetaObject)modelMeta.get(sourceXPath); if (metaSrc!=null&&(metaSrc instanceof AssociationMetadata )) sourceMetadata=(AssociationMetadata)metaSrc; ColumnMetadata 
targetMetadata = (ColumnMetadata)modelMeta.get(targetXPath); boolean successfullyMapped = false; AssociationMapping mapping = new AssociationMapping(); targetMetadata.setType(targetMetadata.TYPE_ASSOCIATION); mapping.setAssociationEndMetadata(sourceMetadata); mapping.setColumnMetadata(targetMetadata); SingleAssociationMappingValidator validator = new SingleAssociationMappingValidator(mapping); successfullyMapped = validator.isValid(); if (successfullyMapped) { // cumulativeMapping.addAssociationMapping(mapping); if (relativePath==null||relativePath.equals("")) cumulativeMapping.addAssociationMapping(mapping, sourceXPath); else cumulativeMapping.addAssociationMapping(mapping, annotationPath); if (updateModel) { if (relativePath==null|relativePath.equals("")) XMIAnnotationUtil.annotateAssociationMapping(metaModel.getModel(),sourceXPath, targetXPath); else XMIAnnotationUtil.annotateAssociationMapping(metaModel.getModel(),annotationPath, targetXPath); } } else { setErrorMessage(validator.getValidationErrorMessage()); } return successfullyMapped; } /** * @param sourceXPath String * @param targetXPath String * @return boolean */ private boolean unmapAssociation(String annotationPath, String relativePath, String targetXPath){ List<AssociationMapping> singleAssociationMapping = cumulativeMapping.getAssociationMappings(); for (AssociationMapping assoS : singleAssociationMapping) { // if (assoS.getAssociationEndMetadata().getXPath().equals(sourceXPath) && assoS.getColumnMetadata().getXPath().equals(targetXPath)) { if (assoS.getColumnMetadata().getXPath().equals(targetXPath)) { if (relativePath==null|relativePath.equals("")) { cumulativeMapping.removeAssociationMapping(assoS, annotationPath); return XMIAnnotationUtil.deAnnotateAssociationMapping(metaModel.getModel(), annotationPath, targetXPath); } else { cumulativeMapping.removeAssociationMapping(assoS, annotationPath); return XMIAnnotationUtil.deAnnotateAssociationMapping(metaModel.getModel(), annotationPath, targetXPath); 
} } } return false; } /** * @param element * @return boolean */ private boolean isClass(String element){ UMLClass clazz = null; boolean isClass = false; String[] modelElements = element.split("\\."); clazz = findClass(metaModel.getModel(), modelElements, 0, modelElements.length-1); if (clazz != null){ // If class is not null then we have a UMLClass isClass = true; } return isClass; } /** * @param element * @return boolean */ private boolean isAttribute(String element){ Object foundObject=metaModel.getModelMetadata().get(element); if (foundObject==null) return false; if (foundObject instanceof AttributeMetadata) { if (Iso21090uiUtil.isCollectionDatatype((AttributeMetadata)foundObject)) return false; else if (Iso21090uiUtil.isDatatypeWithCollectionAttribute((AttributeMetadata)foundObject)) return false; //expand scope to treat all ISO complex type as association mapping //If an ISO complex is mapped to a table.column, it will create association mapping else if (Iso21090Util.iso21090ComplexTypes.contains(((AttributeMetadata)foundObject).getDatatype())) return false; else return true; } if(foundObject instanceof ColumnMetadata) return true; return false; // // UMLAttribute xpathUMLAttribute=ModelUtil.findAttribute(metaModel.getModel(),element); // if (xpathUMLAttribute!=null) // return true; // // UMLAttribute attribute = null; // UMLClass clazz = null; // String[] modelElements = element.split("\\."); // clazz = findClass(metaModel.getModel(), modelElements, 0, modelElements.length-2); // if (clazz!=null){ // LinkedHashMap modelMeta = metaModel.getModelMetadata(); // if (modelMeta.get(element)!= null) { // if (modelMeta.get(element) instanceof AttributeMetadata) return true; // else if (modelMeta.get(element) instanceof ColumnMetadata) return true; // else return false; // } // } // return isAttribute; } /** * @param element * @return boolean */ private boolean isCorrelationTable(String element){ boolean isCorrelationTable = false; UMLClass clazz = null; String[] 
modelElements = element.split("\\."); clazz = findClass(metaModel.getModel(), modelElements, 0, modelElements.length-1); if (clazz != null){ //If clazz is not null then we have a UMLClass //Then we check the Metadata model to see if the Class is a table //which it should be, then we need to check that the table is of type "correlation". //If it is a correlation table then this is a many to many mapping situation. if (metaModel.getModelMetadata().get(element).getClass().getName().equals("TableMetadata")) { TableMetadata table = (TableMetadata)metaModel.getModelMetadata().get(element); if (table.getType().equals("correlation")){ isCorrelationTable=true; } } } return isCorrelationTable; } /** * @param model * @param className * @return UMLClass */ private UMLClass findClass(UMLModel model, String[] className, int start, int end) { for(UMLPackage pkg : model.getPackages()) { if (pkg.getName().equals(className[start])) { UMLClass c = findClass(pkg, className, start+1, end); if(c != null) return c; } } return null; } /** * @param pkg * @param className * @return UMLClass */ private UMLClass findClass(UMLPackage pkg, String[] className, int start, int end) { if (start == end) { for(UMLClass clazz : pkg.getClasses()) { if(clazz.getName().equals(className[start])) return clazz; } } else { for(UMLPackage _pkg : pkg.getPackages()) { UMLClass c = findClass(_pkg, className,start+1,end); if(c != null) return c; } } return null; } /** * @param args */ public static void main(String[] args) { CumulativeMappingGenerator.init("C:/sample.xmi"); CumulativeMappingGenerator x =CumulativeMappingGenerator.getInstance(); // x.map("Logical View.Logical Model.gov.nih.nci.cabio.domain.Gene","Logical View.Data Model.GENE", true ); // x.map("Logical View.Logical Model.gov.nih.nci.cabio.domain.Taxon","Logical View.Data Model.TAXON"); // x.map("Logical View.Logical Model.gov.nih.nci.cabio.domain.Clone","Logical View.Data Model.CLONE"); // x.map("Logical View.Logical 
Model.gov.nih.nci.cabio.domain.Chromosome","Logical View.Data Model.CHROMOSOME"); // x.map("Logical View.Logical Model.gov.nih.nci.cabio.domain.Sequence","Logical View.Data Model.SEQUENCE"); // x.map("Logical View.Logical Model.gov.nih.nci.cabio.domain.Target","Logical View.Data Model.TARGET"); // x.map("Logical View.Logical Model.gov.nih.nci.cabio.domain.Library","Logical View.Data Model.LIBRARY"); // // x.map("Logical View.Logical Model.gov.nih.nci.cabio.domain.Taxon.id","Logical View.Data Model.TAXON.TAXON_ID"); // x.map("Logical View.Logical Model.gov.nih.nci.cabio.domain.Clone.id","Logical View.Data Model.CLONE.CLONE_ID"); // x.map("Logical View.Logical Model.gov.nih.nci.cabio.domain.Library.id","Logical View.Data Model.LIBRARY.LIBRARY_ID"); // x.map("Logical View.Logical Model.gov.nih.nci.cabio.domain.Target.id","Logical View.Data Model.TARGET.TARGET_ID"); // x.map("Logical View.Logical Model.gov.nih.nci.cabio.domain.Gene.id","Logical View.Data Model.GENE.GENE_ID"); // x.map("Logical View.Logical Model.gov.nih.nci.cabio.domain.Sequence.id","Logical View.Data Model.SEQUENCE.SEQUENCE_ID"); // x.map("Logical View.Logical Model.gov.nih.nci.cabio.domain.Chromosome.id","Logical View.Data Model.CHROMOSOME.CHROMOSOME_ID"); // // x.map("Logical View.Logical Model.gov.nih.nci.cabio.domain.Gene.chromosome","Logical View.Data Model.GENE.CHROMOSOME_ID"); // x.map("Logical View.Logical Model.gov.nih.nci.cabio.domain.Chromosome.taxon","Logical View.Data Model.CHROMOSOME.TAXON_ID"); // x.map("Logical View.Logical Model.gov.nih.nci.cabio.domain.Sequence.clone","Logical View.Data Model.SEQUENCE.CLONE_ID"); // x.map("Logical View.Logical Model.gov.nih.nci.cabio.domain.Clone.library","Logical View.Data Model.CLONE.LIBRARY_ID"); // // x.map("Logical View.Logical Model.gov.nih.nci.cabio.domain.Gene.sequenceCollection","Logical View.Data Model.GENE_SEQUENCE.GENE_ID"); // x.map("Logical View.Logical Model.gov.nih.nci.cabio.domain.Sequence.geneCollection","Logical View.Data 
Model.GENE_SEQUENCE.SEQUENCE_ID"); // CumulativeMapping y = x.getCumulativeMapping(); } } /** * HISTORY: $Log: not supported by cvs2svn $ * HISTORY: Revision 1.18 2009/07/14 16:35:49 wangeug * HISTORY: clean codes * HISTORY: * HISTORY: Revision 1.17 2009/07/10 19:55:34 wangeug * HISTORY: MMS re-engineering * HISTORY: * HISTORY: Revision 1.16 2009/06/12 15:50:34 wangeug * HISTORY: clean code: caAdapter MMS 4.1.1 * HISTORY: * HISTORY: Revision 1.15 2008/09/26 20:35:27 linc * HISTORY: Updated according to code standard. * HISTORY: */
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ /** * NegativeInteger.java * * This file was auto-generated from WSDL * by the Apache Axis2 version: SNAPSHOT Built on : Dec 21, 2007 (04:03:30 LKT) */ package org.apache.axis2.databinding.types.xsd; import javax.xml.stream.XMLStreamWriter; /** * NegativeInteger bean class */ public class NegativeInteger implements org.apache.axis2.databinding.ADBBean{ /* This type was generated from the piece of schema that had name = negativeInteger Namespace URI = http://www.w3.org/2001/XMLSchema Namespace Prefix = ns1 */ private static java.lang.String generatePrefix(java.lang.String namespace) { if(namespace.equals("http://www.w3.org/2001/XMLSchema")){ return "xsd"; } return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix(); } /** * field for NegativeInteger */ protected org.apache.axis2.databinding.types.NegativeInteger localNegativeInteger ; /** * Auto generated getter method * @return org.apache.axis2.databinding.types.NegativeInteger */ public org.apache.axis2.databinding.types.NegativeInteger getNegativeInteger(){ return localNegativeInteger; } /** * Auto generated setter method * @param param NegativeInteger */ public void 
setNegativeInteger(org.apache.axis2.databinding.types.NegativeInteger param){ this.localNegativeInteger=param; } public java.lang.String toString(){ return localNegativeInteger.toString(); } /** * isReaderMTOMAware * @return true if the reader supports MTOM */ public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) { boolean isReaderMTOMAware = false; try{ isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE)); }catch(java.lang.IllegalArgumentException e){ isReaderMTOMAware = false; } return isReaderMTOMAware; } /** * * @param parentQName * @param factory * @return org.apache.axiom.om.OMElement */ public org.apache.axiom.om.OMElement getOMElement ( final javax.xml.namespace.QName parentQName, final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{ org.apache.axiom.om.OMDataSource dataSource = new org.apache.axis2.databinding.ADBDataSource(this,parentQName); return factory.createOMElement(dataSource,parentQName); } public void serialize(final javax.xml.namespace.QName parentQName, XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{ serialize(parentQName,xmlWriter,false); } public void serialize(final javax.xml.namespace.QName parentQName, XMLStreamWriter xmlWriter, boolean serializeType) throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{ java.lang.String prefix = null; java.lang.String namespace = null; prefix = parentQName.getPrefix(); namespace = parentQName.getNamespaceURI(); if ((namespace != null) && (namespace.trim().length() > 0)) { java.lang.String writerPrefix = xmlWriter.getPrefix(namespace); if (writerPrefix != null) { xmlWriter.writeStartElement(namespace, parentQName.getLocalPart()); } else { if (prefix == null) { prefix = generatePrefix(namespace); } xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), 
namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } } else { xmlWriter.writeStartElement(parentQName.getLocalPart()); } if (serializeType){ java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://www.w3.org/2001/XMLSchema"); if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){ writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type", namespacePrefix+":negativeInteger", xmlWriter); } else { writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type", "negativeInteger", xmlWriter); } } if (localNegativeInteger==null){ // write the nil attribute throw new org.apache.axis2.databinding.ADBException("negativeInteger cannot be null!!"); }else{ xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localNegativeInteger)); } xmlWriter.writeEndElement(); } /** * Util method to write an attribute with the ns prefix */ private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName, java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{ if (xmlWriter.getPrefix(namespace) == null) { xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } xmlWriter.writeAttribute(namespace,attName,attValue); } /** * Util method to write an attribute without the ns prefix */ private void writeAttribute(java.lang.String namespace,java.lang.String attName, java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{ if (namespace.equals("")) { xmlWriter.writeAttribute(attName,attValue); } else { registerPrefix(xmlWriter, namespace); xmlWriter.writeAttribute(namespace,attName,attValue); } } /** * Util method to write an attribute without the ns prefix */ private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName, javax.xml.namespace.QName qname, 
javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { java.lang.String attributeNamespace = qname.getNamespaceURI(); java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace); if (attributePrefix == null) { attributePrefix = registerPrefix(xmlWriter, attributeNamespace); } java.lang.String attributeValue; if (attributePrefix.trim().length() > 0) { attributeValue = attributePrefix + ":" + qname.getLocalPart(); } else { attributeValue = qname.getLocalPart(); } if (namespace.equals("")) { xmlWriter.writeAttribute(attName, attributeValue); } else { registerPrefix(xmlWriter, namespace); xmlWriter.writeAttribute(namespace, attName, attributeValue); } } /** * method to handle Qnames */ private void writeQName(javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { java.lang.String namespaceURI = qname.getNamespaceURI(); if (namespaceURI != null) { java.lang.String prefix = xmlWriter.getPrefix(namespaceURI); if (prefix == null) { prefix = generatePrefix(namespaceURI); xmlWriter.writeNamespace(prefix, namespaceURI); xmlWriter.setPrefix(prefix,namespaceURI); } if (prefix.trim().length() > 0){ xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname)); } else { // i.e this is the default namespace xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname)); } } else { xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname)); } } private void writeQNames(javax.xml.namespace.QName[] qnames, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { if (qnames != null) { // we have to store this data until last moment since it is not possible to write any // namespace data after writing the charactor data java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer(); java.lang.String 
namespaceURI = null; java.lang.String prefix = null; for (int i = 0; i < qnames.length; i++) { if (i > 0) { stringToWrite.append(" "); } namespaceURI = qnames[i].getNamespaceURI(); if (namespaceURI != null) { prefix = xmlWriter.getPrefix(namespaceURI); if ((prefix == null) || (prefix.length() == 0)) { prefix = generatePrefix(namespaceURI); xmlWriter.writeNamespace(prefix, namespaceURI); xmlWriter.setPrefix(prefix,namespaceURI); } if (prefix.trim().length() > 0){ stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i])); } else { stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i])); } } else { stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i])); } } xmlWriter.writeCharacters(stringToWrite.toString()); } } /** * Register a namespace prefix */ private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException { java.lang.String prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = generatePrefix(namespace); javax.xml.namespace.NamespaceContext nsContext = xmlWriter.getNamespaceContext(); while (true) { java.lang.String uri = nsContext.getNamespaceURI(prefix); if (uri == null || uri.length() == 0) { break; } prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix(); } xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } return prefix; } /** * databinding method to get an XML representation of this object * */ public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName) throws org.apache.axis2.databinding.ADBException{ java.util.ArrayList elementList = new java.util.ArrayList(); java.util.ArrayList attribList = new java.util.ArrayList(); elementList.add(org.apache.axis2.databinding.utils.reader.ADBXMLStreamReader.ELEMENT_TEXT); if 
(localNegativeInteger != null){ elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localNegativeInteger)); } else { throw new org.apache.axis2.databinding.ADBException("negativeInteger cannot be null!!"); } return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray()); } /** * Factory class that keeps the parse method */ public static class Factory{ public static NegativeInteger fromString(java.lang.String value, java.lang.String namespaceURI){ NegativeInteger returnValue = new NegativeInteger(); returnValue.setNegativeInteger( org.apache.axis2.databinding.utils.ConverterUtil.convertToNegativeInteger(value)); return returnValue; } public static NegativeInteger fromString(javax.xml.stream.XMLStreamReader xmlStreamReader, java.lang.String content) { if (content.indexOf(":") > -1){ java.lang.String prefix = content.substring(0,content.indexOf(":")); java.lang.String namespaceUri = xmlStreamReader.getNamespaceContext().getNamespaceURI(prefix); return NegativeInteger.Factory.fromString(content,namespaceUri); } else { return NegativeInteger.Factory.fromString(content,""); } } /** * static method to create the object * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element * Postcondition: If this object is an element, the reader is positioned at its end element * If this object is a complex type, the reader is positioned at the end element of its outer element */ public static NegativeInteger parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{ NegativeInteger object = new NegativeInteger(); int event; java.lang.String nillableValue = null; java.lang.String prefix =""; java.lang.String namespaceuri =""; try { while (!reader.isStartElement() && 
!reader.isEndElement()) reader.next(); if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){ java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type"); if (fullTypeName!=null){ java.lang.String nsPrefix = null; if (fullTypeName.indexOf(":") > -1){ nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":")); } nsPrefix = nsPrefix==null?"":nsPrefix; java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1); if (!"negativeInteger".equals(type)){ //find namespace for the prefix java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix); return (NegativeInteger)org.apache.axis2.databinding.types.xsd.ExtensionMapper.getTypeObject( nsUri,type,reader); } } } // Note all attributes that were handled. Used to differ normal attributes // from anyAttributes. java.util.Vector handledAttributes = new java.util.Vector(); while(!reader.isEndElement()) { if (reader.isStartElement() || reader.hasText()){ if (reader.isStartElement() || reader.hasText()){ java.lang.String content = reader.getElementText(); object.setNegativeInteger( org.apache.axis2.databinding.utils.ConverterUtil.convertToNegativeInteger(content)); } // End of if for expected property start element else{ // A start element we are not expecting indicates an invalid parameter was passed throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName()); } } else { reader.next(); } } // end of while loop } catch (javax.xml.stream.XMLStreamException e) { throw new java.lang.Exception(e); } return object; } }//end of factory class }
package com.xored.vertx.typed.rpc; import com.esotericsoftware.kryo.Kryo; import com.esotericsoftware.kryo.Kryo.DefaultInstantiatorStrategy; import com.esotericsoftware.kryo.Registration; import com.esotericsoftware.kryo.io.Input; import com.esotericsoftware.kryo.io.Output; import com.esotericsoftware.kryo.serializers.MapSerializer; import io.vertx.core.buffer.Buffer; import io.vertx.core.eventbus.DeliveryOptions; import io.vertx.core.eventbus.EventBus; import io.vertx.core.eventbus.Message; import io.vertx.core.eventbus.MessageConsumer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; /** * Factory to create client and server service for RPC communication. * * @author Konstantin Zaitsev */ public class EventBusServiceFactory { private static final Logger log = LoggerFactory.getLogger(EventBusServiceFactory.class); private static final String HEADER_METHOD_NAME = "method"; private static final ThreadLocal<Kryo> kryos = new ThreadLocal<Kryo>() { @SuppressWarnings("rawtypes") @Override protected Kryo initialValue() { Kryo kryo = new Kryo(); kryo.register(Map.class, new MapSerializer() { protected Map create(Kryo kryo, Input input, java.lang.Class<Map> type) { return new HashMap(); } }); kryo.setInstantiatorStrategy(new DefaultInstantiatorStrategy(new ListInstantiatorStrategy())); return kryo; } }; /** * Creates proxy client that invokes appropriate method of RPC service. * * @param eventBus EventBus instance * @param iface RPC service interface that marked with {@link EventBusService} annotation. * * @return proxy client of interface that use EventBus for communication. 
*/ @SuppressWarnings("ThrowableResultOfMethodCallIgnored") public static <T> T createClient(EventBus eventBus, Class<T> iface) { EventBusService service = getEventBusServiceInterface(iface).getAnnotation(EventBusService.class); if (service == null) { throw new RuntimeException("Interface should has EventBusService annotiation."); } String address = service.value(); @SuppressWarnings("unchecked") T instance = (T) Proxy.newProxyInstance(iface.getClassLoader(), new Class[] { iface }, (proxy, method, args) -> { String methodName = method.getName(); Class<?>[] classes = method.getParameterTypes(); DeliveryOptions options = new DeliveryOptions().addHeader(HEADER_METHOD_NAME, methodName); Buffer buffer = null; if (classes.length > 0) { buffer = writeObjects(args); } if (method.getReturnType() == void.class) { if (method.getAnnotation(Publish.class) != null) { eventBus.publish(address, buffer, options); } else { eventBus.send(address, buffer, options); } return null; } else { Class<?> returnType = method.getReturnType(); if (returnType.isAssignableFrom(CompletableFuture.class)) { CompletableFuture<Object> result = new CompletableFuture<>(); eventBus.<Buffer>send(address, buffer, options, r -> { if (r.failed()) { result.completeExceptionally(r.cause()); return; } Message<Buffer> msg = r.result(); if (msg != null) { Buffer buf = msg.body(); Registration clazz = kryos.get().readClass(new Input(buf.getBytes())); if (clazz != null && Throwable.class.isAssignableFrom(clazz.getType())) { result.completeExceptionally( (Throwable) kryos.get().readClassAndObject(new Input(buf.getBytes()))); } else { result.complete(readObject(msg.body())); } } else { result.complete(null); } }); return result; } throw new RuntimeException("EventBusService support only CompletableFuture returns"); } }); return instance; } /** * Registers RPC service in Vertx EventBus. 
* * @param eventBus EventBus instance * @param serverHandler RPC service implementation * * @return EventBus message consumer that can be used to unregister service. */ public static <T> MessageConsumer<Buffer> registerServer(EventBus eventBus, T serverHandler) { log.debug("Register EventBus Service: {}", serverHandler.getClass().getName()); HashMap<String, Method> methods = new HashMap<>(); Class<?> serviceInterface = getEventBusServiceInterface(serverHandler.getClass()); EventBusService serviceAnnotation = serviceInterface.getAnnotation(EventBusService.class); String address = serviceAnnotation.value(); for (Method method : serviceInterface.getDeclaredMethods()) { method.setAccessible(true); methods.put(method.getName(), method); } MessageConsumer<Buffer> consumer = eventBus.<Buffer>consumer(address); consumer.handler(r -> { try { String methodName = r.headers().get(HEADER_METHOD_NAME); if (!methods.containsKey(methodName)) { String msg = String.format("Method %s not found", methodName); log.error(msg); r.fail(1, msg); return; } Method method = methods.get(methodName); Object result = null; try { if (method.getParameterTypes().length == 0) { result = method.invoke(serverHandler); } else { Object[] objects = readObjects(r.body(), method.getParameterTypes().length); result = method.invoke(serverHandler, objects); } if (method.getReturnType().isAssignableFrom(CompletableFuture.class)) { ((CompletableFuture<?>) result).whenComplete((msg, e) -> { if (e != null) { r.reply(writeObject(e)); } else { r.reply(writeObject(msg)); } }); } } catch (InvocationTargetException ex) { r.reply(writeObject(ex.getTargetException())); } } catch (Throwable e) { log.error(e.getMessage(), e); r.fail(-1, e.getMessage()); } }); return consumer; } private static Class<?> getEventBusServiceInterface(Class<?> clazz) { if (clazz.getAnnotation(EventBusService.class) != null) { return clazz; } for (Class<?> iface : clazz.getInterfaces()) { if (iface.getAnnotation(EventBusService.class) != null) { 
return iface; } } if (clazz.getSuperclass() != null) { return getEventBusServiceInterface(clazz.getSuperclass()); } throw new RuntimeException(String.format("%s interface has not EventBusService annotation", clazz.getName())); } private static Object[] readObjects(Buffer buffer, int count) { final Input input = new Input(buffer.getBytes()); if (count == 1) { return new Object[] { kryos.get().readClassAndObject(input) }; } List<Object> result = new ArrayList<>(); for (int i = 0; i < count; i++) { result.add(kryos.get().readClassAndObject(input)); } return result.toArray(new Object[result.size()]); } @SuppressWarnings("unchecked") private static <T> T readObject(Buffer buffer) { return (T) kryos.get().readClassAndObject(new Input(buffer.getBytes())); } private static Buffer writeObjects(Object[] objs) { final Output output = new Output(2048, Integer.MAX_VALUE); for (int i = 0; i < objs.length; i++) { kryos.get().writeClassAndObject(output, objs[i]); } return Buffer.buffer(output.toBytes()); } private static Buffer writeObject(Object objs) { final Output output = new Output(2048, Integer.MAX_VALUE); kryos.get().writeClassAndObject(output, objs); return Buffer.buffer(output.toBytes()); } }
package com.ofg.bankstatement.util;

import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.ofg.loans.api.beans.payment.PaymentInfo;
import com.ofg.loans.domain.Iban;
import com.ofg.loans.domain.model.client.ClientAddress;
import com.ofg.loans.domain.util.MD5Utils;
import com.ofg.loans.util.date.DateTimeUtils;
import com.ofg.loans.util.numeric.BigDecimalUtils;
import org.apache.commons.lang.StringUtils;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * Helper routines for parsing and building Polish (PL) bank-statement / payment-file
 * text: field and sub-field assembly, IBAN country-code handling, amount-to-cents
 * conversion, and payer-name extraction from free-text payment details.
 */
public class BanksUtils {

    private static final String ONE_WHITE_SPACE = " ";
    private static final String TWO_WHITE_SPACES = "  ";
    // ISO country prefix used by Polish IBANs.
    private static final String COUNTRY_CODE = "PL";
    private static final String QUOTATION_MARK = "\"";
    // Polish address prefixes: OS. = estate, WS. = village, UL. = street.
    private static final String ADDRESS_PREFIX_OS = "OS.";
    private static final String ADDRESS_PREFIX_WS = "WS.";
    private static final String ADDRESS_PREFIX_UL = "UL.";
    // Polish honorifics (Mr / Mrs) that may precede a payer name.
    private static final String MR_TITLE = "PAN";
    private static final String MRS_TITLE = "PANI";

    /**
     * Appends a field value plus trailing separator, optionally wrapped in quotes.
     * Note: a null {@code val} is concatenated as the literal "null".
     */
    public static String addValue(String val, String fieldSeparator, boolean addQotationMarks) {
        String additionalSymbol = addQotationMarks ? QUOTATION_MARK : "";
        return additionalSymbol + val + additionalSymbol + fieldSeparator;
    }

    /** Extracts the bank sort code from an account number (PL prefix added if missing). */
    public static String getBankSortCode(String accountNumber) {
        Iban iban = Iban.split(addCountryCodeToBankAccount(accountNumber));
        return iban.getBankCode();
    }

    /**
     * Converts a currency amount to a whole-cents string.
     * intValueExact() throws ArithmeticException if the scaled amount has a
     * fractional remainder or overflows an int.
     */
    public static String convertCurrencySumToCents(BigDecimal amount) {
        amount = amount.multiply(BigDecimalUtils.amount(100));
        Integer intAmount = amount.intValueExact();
        return intAmount.toString();
    }

    /** Inverse of {@link #convertCurrencySumToCents}: cents string back to a currency amount. */
    public static BigDecimal convertCentsToCurrencyAmount(String cents) {
        BigDecimal amount = BigDecimalUtils.amount(cents);
        amount = amount.divide(BigDecimalUtils.amount(100));
        return amount;
    }

    /**
     * Joins exactly {@code subFieldCount} sub-fields with {@code lineSeparator},
     * padding missing trailing entries with empty strings.
     */
    public static String buildSubFieldBlock(List<String> subFields, String lineSeparator, int subFieldCount) {
        StringBuilder line = new StringBuilder();
        for (int i = 0; i < subFieldCount; i++) {
            String string = subFields.size() > i ? subFields.get(i) : "";
            if (i > 0) {
                line.append(lineSeparator);
            }
            line.append(string);
        }
        return line.toString();
    }

    /** Replaces every occurrence of the split symbol, then trims the result. */
    public static String removeSplitSymbols(String text, String splitSymbol, String replaceSymbol) {
        return text.replace(splitSymbol, replaceSymbol).trim();
    }

    /**
     * Prepends "PL" when the account does not already start with it and is not
     * already full-IBAN length (28 chars).
     * NOTE(review): substring(0, 2) throws for 1-character inputs — the isEmpty
     * guard does not cover length 1; confirm inputs are always >= 2 chars.
     */
    public static String addCountryCodeToBankAccount(String bankAccount) {
        if (!StringUtils.isEmpty(bankAccount) && !COUNTRY_CODE.equals(bankAccount.substring(0, 2))
                && bankAccount.length() != 28) {
            bankAccount = COUNTRY_CODE + bankAccount;
        }
        return bankAccount;
    }

    /**
     * Strips a leading "PL" country code if present.
     * NOTE(review): no null/length guard here — substring(0, 2) throws on null or
     * single-character input; confirm callers pre-validate.
     */
    public static String removeCountryCodeFromBankAccount(String bankAccount) {
        if (COUNTRY_CODE.equals(bankAccount.substring(0, 2))) {
            bankAccount = bankAccount.substring(2);
        }
        return bankAccount;
    }

    /** Index of the first letter at or after {@code startFromIndex}, or -1. */
    public static int indexOfFirstLetter(String string, int startFromIndex) {
        return indexOfFirstOccurrence(string, startFromIndex, false, true);
    }

    /**
     * Index of the first digit and/or letter (per the flags) at or after
     * {@code startFromIndex}; -1 when absent or arguments are invalid.
     */
    public static int indexOfFirstOccurrence(String string, int startFromIndex, boolean findDigitChar,
            boolean findLetterChar) {
        if (string != null && startFromIndex > -1) {
            char[] chars = string.toCharArray();
            for (int i = startFromIndex; i < chars.length; i++) {
                if (findDigitChar && Character.isDigit(chars[i])) {
                    return i;
                } else if (findLetterChar && Character.isLetter(chars[i])) {
                    return i;
                }
            }
        }
        return -1;
    }

    /**
     * Index of the second space character.
     * Returns -1 when fewer than two spaces exist, but 0 (not -1) for an empty
     * input — NOTE(review): that asymmetry looks accidental; confirm callers rely on it.
     */
    public static int indexOfSecondWhiteSpace(String payerDetails) {
        if (StringUtils.isNotEmpty(payerDetails)) {
            int indexOfFirstWhitespace = payerDetails.indexOf(' ');
            return payerDetails.indexOf(' ', indexOfFirstWhitespace + 1);
        } else {
            return 0;
        }
    }

    /** Strips a known field prefix from the start of a line (no-op if absent). */
    public static String parseFieldValue(String fieldPrefix, String source) {
        return StringUtils.removeStart(source, fieldPrefix);
    }

    /**
     * Returns the value of the line at {@code currentIndex} (prefix stripped) when
     * the index is in range and the line starts with the prefix; otherwise "".
     */
    public static String getLineIfExists(String linePrefix, List<String> lines, int currentIndex) {
        return lines != null && StringUtils.isNotEmpty(linePrefix) && currentIndex > -1
                && currentIndex < lines.size() && lines.get(currentIndex).startsWith(linePrefix)
                ? parseFieldValue(linePrefix, lines.get(currentIndex)) : StringUtils.EMPTY;
    }

    /** Index of the first line starting with {@code prefix}, or -1. */
    public static int indexOfFirstLineWithPrefix(List<String> lines, String prefix) {
        for (int i = 0; i < lines.size(); i++) {
            String line = lines.get(i);
            if (line.startsWith(prefix)) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Concatenates the values of consecutive numbered field lines
     * ({@code fieldPrefix + n}, n incrementing from {@code prefixStartingLineIndex}),
     * stopping at the first line whose prefix does not match.
     */
    public static String joinFieldValuesFromTo(List<String> lines, String fieldPrefix, int prefixStartingLineIndex,
            int indexFrom, int linesCount) {
        StringBuilder details = new StringBuilder();
        int currentDetailLineIndex = prefixStartingLineIndex;
        for (int i = indexFrom; i < indexFrom + linesCount; i++) {
            String line = lines.get(i);
            String fieldName = fieldPrefix + currentDetailLineIndex;
            if (!line.startsWith(fieldName)) {
                break;
            }
            details.append(parseFieldValue(fieldName, line));
            currentDetailLineIndex++;
        }
        return details.toString();
    }

    /**
     * Builds a deterministic MD5 reference for a payment from booking date, amount,
     * both account numbers (when present) and whitespace-stripped, upper-cased details.
     * NOTE(review): toUpperCase() uses the default locale — verify this cannot vary
     * across JVM locales (e.g. Turkish dotless-i) for the same payment.
     */
    public static String md5BankReference(PaymentInfo paymentInfo) {
        StringBuilder sb = new StringBuilder();
        sb.append(DateTimeUtils.toDateString(paymentInfo.getBookingDate()));
        sb.append(paymentInfo.getAmount().toString());
        String accountNumber = paymentInfo.getAccountNumber();
        if (StringUtils.isNotBlank(accountNumber)) {
            sb.append(accountNumber);
        }
        String companyBankAccount = paymentInfo.getCompanyBankAccount();
        if (StringUtils.isNotBlank(companyBankAccount)) {
            sb.append(companyBankAccount);
        }
        String details = paymentInfo.getDetails();
        if (StringUtils.isNotBlank(details)) {
            sb.append(details.toUpperCase().replaceAll("\\s", ""));
        }
        return MD5Utils.generateMD5(sb.toString());
    }

    /** True when {@code index} was found and the two characters after it are digits. */
    public static boolean nextTwoCharsIsDigits(int index, String line) {
        return found(index) && line.length() >= index + 3 && Character.isDigit(line.charAt(index + 1))
                && Character.isDigit(line.charAt(index + 2));
    }

    /**
     * Splits lines that contain two sub-fields (second one recognized by
     * prefix-symbol + two digits, searched from position 1) into separate lines;
     * other lines pass through unchanged.
     */
    public static List<String> extractOneSubFieldInOneLine(List<String> lines, String subFieldPrefixSymbol) {
        List<String> oneFieldInOneRowLines = new ArrayList<String>();
        for (String line : lines) {
            int indexOfSubField = line.indexOf(subFieldPrefixSymbol, 1);
            boolean nextTwoSymbolsIsDigits = nextTwoCharsIsDigits(indexOfSubField, line);
            if (indexOfSubField != -1 && nextTwoSymbolsIsDigits) {
                String firstField = line.substring(0, indexOfSubField);
                oneFieldInOneRowLines.add(firstField);
                String secondField = line.substring(indexOfSubField, line.length());
                oneFieldInOneRowLines.add(secondField);
            } else {
                oneFieldInOneRowLines.add(line);
            }
        }
        return oneFieldInOneRowLines;
    }

    /** Value of the first line starting with {@code fieldName} (prefix stripped), or "". */
    public static String getFieldValue(List<String> lines, String fieldName) {
        int index = indexOfFirstLineWithPrefix(lines, fieldName);
        if (index > -1) {
            return getLineIfExists(fieldName, lines, index);
        } else {
            return StringUtils.EMPTY;
        }
    }

    /**
     * Joins all lines and returns the text between {@code field} and the next
     * occurrence of {@code nextField} (or to the end when nextField is null/absent);
     * "" when {@code field} is not found.
     */
    public static String getMultilineFieldValue(List<String> lines, String field, String nextField) {
        Preconditions.checkArgument(field != null, "field cannot be null");
        String joinedLines = Joiner.on("").join(lines);
        int begin = joinedLines.indexOf(field);
        if (begin < 0) {
            return StringUtils.EMPTY;
        }
        begin += field.length();
        int end = joinedLines.length();
        if (nextField != null) {
            int pos = joinedLines.indexOf(nextField, begin);
            if (pos >= 0) {
                end = pos;
            }
        }
        return joinedLines.substring(begin, end);
    }

    /** Case-insensitive index of the first matching search string in {@code text}, or -1. */
    public static int indexOfText(String text, List<String> searchStrings) {
        if (searchStrings == null) {
            return -1;
        }
        for (String searchString : searchStrings) {
            int indexIgnoreCase = StringUtils.indexOfIgnoreCase(text, searchString);
            if (indexIgnoreCase > -1) {
                return indexIgnoreCase;
            }
        }
        return -1;
    }

    /**
     * Extracts the person's name from free-text payment details: drops a leading
     * PAN/PANI title, then cuts at whichever address marker comes first — an
     * address prefix (UL./WS./OS.), a double space, or the first digit — falling
     * back to the second single space when none of those occur.
     */
    public static String parsePersonNameFromDetails(String details) {
        if (details == null) {
            return "";
        }
        if (potentiallyHasTitle(details)) {
            details = tryDropTitle(details);
        }
        int indexOfSecondWhiteSpace = BanksUtils.indexOfSecondWhiteSpace(details);
        int indexOfSpecAddressSymbols = BanksUtils.indexOfText(details,
                Arrays.asList(ADDRESS_PREFIX_UL, ADDRESS_PREFIX_WS, ADDRESS_PREFIX_OS));
        int indexOfTwoWhiteSpaces = details.indexOf(TWO_WHITE_SPACES);
        int indexOfDigitSymbol = indexOfFirstOccurrence(details, 0, true, false);
        int indexOfAddress = -1;
        // No marker at all: fall back to cutting after the first two words.
        if (indexOfDigitSymbol == -1 && indexOfSpecAddressSymbols == -1 && indexOfTwoWhiteSpaces == -1
                && found(indexOfSecondWhiteSpace)) {
            indexOfAddress = indexOfSecondWhiteSpace;
        }
        // Normalize "not found" to end-of-string so the three-way minimum below works.
        indexOfDigitSymbol = indexOfDigitSymbol == -1 ? details.length() : indexOfDigitSymbol;
        indexOfSpecAddressSymbols = indexOfSpecAddressSymbols == -1 ? details.length() : indexOfSpecAddressSymbols;
        indexOfTwoWhiteSpaces = indexOfTwoWhiteSpaces == -1 ? details.length() : indexOfTwoWhiteSpaces;
        if (indexOfSpecAddressSymbols < indexOfTwoWhiteSpaces && indexOfSpecAddressSymbols < indexOfDigitSymbol) {
            indexOfAddress = indexOfSpecAddressSymbols;
        } else if (indexOfTwoWhiteSpaces < indexOfSpecAddressSymbols && indexOfTwoWhiteSpaces < indexOfDigitSymbol) {
            indexOfAddress = indexOfTwoWhiteSpaces;
        } else if (indexOfDigitSymbol < indexOfSpecAddressSymbols && indexOfDigitSymbol < indexOfTwoWhiteSpaces) {
            // NOTE(review): this branch wins when the DIGIT comes first, yet it assigns
            // indexOfSecondWhiteSpace rather than indexOfDigitSymbol — the asymmetry with
            // the two branches above looks like a possible bug; confirm against test data
            // (a digit inside the first two words would yield a -1 / strange cut).
            indexOfAddress = indexOfSecondWhiteSpace;
        }
        return details.substring(0, indexOfAddress == -1 ? details.length() : indexOfAddress).trim();
    }

    /** Cheap pre-check: text starts with "PAN" (which also matches "PANI"). */
    private static boolean potentiallyHasTitle(String text) {
        return text.toUpperCase().startsWith(MR_TITLE);
    }

    /** Removes a leading PAN/PANI word when the first token is exactly a title. */
    private static String tryDropTitle(String details) {
        int indexOfFirstSpace = details.indexOf(ONE_WHITE_SPACE);
        if (found(indexOfFirstSpace)) {
            String potentialTitle = extractPotentialTitle(details, indexOfFirstSpace);
            if (isTitle(potentialTitle)) {
                details = details.substring(indexOfFirstSpace).trim();
            }
        }
        return details;
    }

    /** First token of the details string (up to the first space), trimmed. */
    private static String extractPotentialTitle(String details, int endIndex) {
        return details.substring(0, endIndex).trim();
    }

    /** Readable wrapper for the "indexOf found something" convention. */
    private static boolean found(int index) {
        return index != -1;
    }

    /** True when the token is exactly PAN or PANI (case-insensitive). */
    private static boolean isTitle(String potentialTitle) {
        return MR_TITLE.equalsIgnoreCase(potentialTitle) || MRS_TITLE.equalsIgnoreCase(potentialTitle);
    }

    private static final String ADDRESS_FIELD_SEPARATOR = " ";

    /**
     * Flattens a client address into one space-separated line: location1..6 then
     * postal code, skipping blank parts and trimming each value.
     */
    public static String addressToSingleLine(ClientAddress address) {
        StringBuilder sb = new StringBuilder();
        appendToAddress(sb, address.getLocation1(), ADDRESS_FIELD_SEPARATOR);
        appendToAddress(sb, address.getLocation2(), ADDRESS_FIELD_SEPARATOR);
        appendToAddress(sb, address.getLocation3(), ADDRESS_FIELD_SEPARATOR);
        appendToAddress(sb, address.getLocation4(), ADDRESS_FIELD_SEPARATOR);
        appendToAddress(sb, address.getLocation5(), ADDRESS_FIELD_SEPARATOR);
        appendToAddress(sb, address.getLocation6(), ADDRESS_FIELD_SEPARATOR);
        appendToAddress(sb, address.getPostalCode(), ADDRESS_FIELD_SEPARATOR);
        return sb.toString();
    }

    /** Appends a non-blank, trimmed value, inserting the separator between parts. */
    private static void appendToAddress(StringBuilder sb, String value, String separator) {
        if (!StringUtils.isBlank(value)) {
            if (sb.length() > 0) {
                sb.append(separator);
            }
            sb.append(value.trim());
        }
    }
}
/*
 * Copyright 2002-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.security.config.annotation.web.configurers;

import java.io.IOException;
import java.util.List;
import java.util.stream.Collectors;

import javax.servlet.Filter;
import javax.servlet.ServletException;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.mock.web.MockFilterChain;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.mock.web.MockHttpServletResponse;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.builders.WebSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.config.test.SpringTestContext;
import org.springframework.security.config.test.SpringTestContextExtension;
import org.springframework.security.core.userdetails.PasswordEncodedUser;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.provisioning.InMemoryUserDetailsManager;
import org.springframework.security.web.DefaultSecurityFilterChain;
import org.springframework.security.web.FilterChainProxy;
import org.springframework.security.web.SecurityFilterChain;
import org.springframework.security.web.access.ExceptionTranslationFilter;
import org.springframework.security.web.access.intercept.FilterSecurityInterceptor;
import org.springframework.security.web.authentication.AnonymousAuthenticationFilter;
import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter;
import org.springframework.security.web.authentication.logout.LogoutFilter;
import org.springframework.security.web.context.SecurityContextPersistenceFilter;
import org.springframework.security.web.context.request.async.WebAsyncManagerIntegrationFilter;
import org.springframework.security.web.csrf.CsrfFilter;
import org.springframework.security.web.csrf.CsrfToken;
import org.springframework.security.web.csrf.DefaultCsrfToken;
import org.springframework.security.web.csrf.HttpSessionCsrfTokenRepository;
import org.springframework.security.web.header.HeaderWriterFilter;
import org.springframework.security.web.savedrequest.RequestCacheAwareFilter;
import org.springframework.security.web.servletapi.SecurityContextHolderAwareRequestFilter;
import org.springframework.security.web.session.SessionManagementFilter;
import org.springframework.security.web.util.matcher.AnyRequestMatcher;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Tests for the default filter population of the web-security Java configuration:
 * which filters a {@link FilterChainProxy} ends up with under various
 * {@link WebSecurityConfigurerAdapter} setups.
 *
 * @author Rob Winch
 * @author Konstantin Volivach
 */
@ExtendWith(SpringTestContextExtension.class)
public class DefaultFiltersTests {

	public final SpringTestContext spring = new SpringTestContext(this);

	// A config that only registers users (no HttpSecurity override) still
	// yields a usable FilterChainProxy bean.
	@Test
	public void defaultTheWebSecurityConfigurerAdapter() {
		this.spring.register(FilterChainProxyBuilderMissingConfig.class);
		assertThat(this.spring.getContext().getBean(FilterChainProxy.class)).isNotNull();
	}

	// With defaults disabled (super(true)), only the explicitly configured
	// form-login filter ends up in the single any-request chain.
	@Test
	public void nullWebInvocationPrivilegeEvaluator() {
		this.spring.register(NullWebInvocationPrivilegeEvaluatorConfig.class, UserDetailsServiceConfig.class);
		List<SecurityFilterChain> filterChains = this.spring.getContext().getBean(FilterChainProxy.class)
				.getFilterChains();
		assertThat(filterChains.size()).isEqualTo(1);
		DefaultSecurityFilterChain filterChain = (DefaultSecurityFilterChain) filterChains.get(0);
		assertThat(filterChain.getRequestMatcher()).isInstanceOf(AnyRequestMatcher.class);
		assertThat(filterChain.getFilters().size()).isEqualTo(1);
		long filter = filterChain.getFilters().stream()
				.filter((it) -> it instanceof UsernamePasswordAuthenticationFilter).count();
		assertThat(filter).isEqualTo(1);
	}

	// web.ignoring() produces a first, empty chain for the ignored resources and
	// a second any-request chain that carries the full default filter stack.
	@Test
	public void filterChainProxyBuilderIgnoringResources() {
		this.spring.register(FilterChainProxyBuilderIgnoringConfig.class, UserDetailsServiceConfig.class);
		List<SecurityFilterChain> filterChains = this.spring.getContext().getBean(FilterChainProxy.class)
				.getFilterChains();
		assertThat(filterChains.size()).isEqualTo(2);
		DefaultSecurityFilterChain firstFilter = (DefaultSecurityFilterChain) filterChains.get(0);
		DefaultSecurityFilterChain secondFilter = (DefaultSecurityFilterChain) filterChains.get(1);
		assertThat(firstFilter.getFilters().isEmpty()).isEqualTo(true);
		assertThat(secondFilter.getRequestMatcher()).isInstanceOf(AnyRequestMatcher.class);
		List<? extends Class<? extends Filter>> classes = secondFilter.getFilters().stream().map(Filter::getClass)
				.collect(Collectors.toList());
		assertThat(classes.contains(WebAsyncManagerIntegrationFilter.class)).isTrue();
		assertThat(classes.contains(SecurityContextPersistenceFilter.class)).isTrue();
		assertThat(classes.contains(HeaderWriterFilter.class)).isTrue();
		assertThat(classes.contains(LogoutFilter.class)).isTrue();
		assertThat(classes.contains(CsrfFilter.class)).isTrue();
		assertThat(classes.contains(RequestCacheAwareFilter.class)).isTrue();
		assertThat(classes.contains(SecurityContextHolderAwareRequestFilter.class)).isTrue();
		assertThat(classes.contains(AnonymousAuthenticationFilter.class)).isTrue();
		assertThat(classes.contains(SessionManagementFilter.class)).isTrue();
		assertThat(classes.contains(ExceptionTranslationFilter.class)).isTrue();
		assertThat(classes.contains(FilterSecurityInterceptor.class)).isTrue();
	}

	// An empty configure(HttpSecurity) still provides default logout handling:
	// POST /logout with a valid CSRF token redirects to /login?logout.
	@Test
	public void defaultFiltersPermitAll() throws IOException, ServletException {
		this.spring.register(DefaultFiltersConfigPermitAll.class, UserDetailsServiceConfig.class);
		MockHttpServletResponse response = new MockHttpServletResponse();
		MockHttpServletRequest request = new MockHttpServletRequest("POST", "");
		request.setServletPath("/logout");
		CsrfToken csrfToken = new DefaultCsrfToken("X-CSRF-TOKEN", "_csrf", "BaseSpringSpec_CSRFTOKEN");
		// Store the token in the session and echo it as a request parameter so the
		// CsrfFilter accepts the POST.
		new HttpSessionCsrfTokenRepository().saveToken(csrfToken, request, response);
		request.setParameter(csrfToken.getParameterName(), csrfToken.getToken());
		this.spring.getContext().getBean("springSecurityFilterChain", Filter.class).doFilter(request, response,
				new MockFilterChain());
		assertThat(response.getRedirectedUrl()).isEqualTo("/login?logout");
	}

	// Minimal config: only in-memory users, relying entirely on default web security.
	@EnableWebSecurity
	static class FilterChainProxyBuilderMissingConfig {

		@Autowired
		void configureGlobal(AuthenticationManagerBuilder auth) throws Exception {
			// @formatter:off
			auth
				.inMemoryAuthentication()
					.withUser("user").password("password").roles("USER");
			// @formatter:on
		}

	}

	@Configuration
	static class UserDetailsServiceConfig {

		@Bean
		UserDetailsService userDetailsService() {
			return new InMemoryUserDetailsManager(PasswordEncodedUser.user(), PasswordEncodedUser.admin());
		}

	}

	// super(true) disables the default filter set; only form login is added.
	@EnableWebSecurity
	static class NullWebInvocationPrivilegeEvaluatorConfig extends WebSecurityConfigurerAdapter {

		NullWebInvocationPrivilegeEvaluatorConfig() {
			super(true);
		}

		@Override
		protected void configure(HttpSecurity http) throws Exception {
			http.formLogin();
		}

	}

	// Ignores /resources/** at the WebSecurity level and secures everything else.
	@EnableWebSecurity
	static class FilterChainProxyBuilderIgnoringConfig extends WebSecurityConfigurerAdapter {

		@Override
		public void configure(WebSecurity web) {
			// @formatter:off
			web
				.ignoring()
					.antMatchers("/resources/**");
			// @formatter:on
		}

		@Override
		protected void configure(HttpSecurity http) throws Exception {
			// @formatter:off
			http
				.authorizeRequests()
					.anyRequest().hasRole("USER");
			// @formatter:on
		}

	}

	// Empty override: keeps all default filters without any authorization rules.
	@EnableWebSecurity
	static class DefaultFiltersConfigPermitAll extends WebSecurityConfigurerAdapter {

		@Override
		protected void configure(HttpSecurity http) {
		}

	}

}
package org.wso2.maven.p2.generate.feature; import java.io.*; import java.util.*; import java.util.prefs.Preferences; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.OutputKeys; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.metadata.ArtifactMetadataSource; import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.artifact.resolver.ArtifactNotFoundException; import org.apache.maven.artifact.resolver.ArtifactResolutionException; import org.apache.maven.model.Resource; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.project.MavenProject; import org.apache.maven.project.MavenProjectHelper; import org.codehaus.plexus.util.DirectoryScanner; import org.codehaus.plexus.util.FileUtils; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.wso2.maven.p2.generate.utils.FileManagementUtil; import org.wso2.maven.p2.generate.utils.P2Utils; import org.wso2.maven.p2.generate.utils.MavenUtils; import org.wso2.maven.p2.generate.utils.PropertyReplacer; /** * Write environment information for the current build to file. 
* * @goal p2-feature-gen * @phase package */ public class FeatureGenMojo extends AbstractMojo { /** * feature id * * @parameter * @required */ private String id; /** * version * * @parameter default-value="${project.version}" */ private String version; /** * label of the feature * * @parameter default-value="${project.name}" */ private String label; /** * description of the feature * * @parameter default-value="${project.description}" */ private String description; /** * provider name * * @parameter default-value="%providerName" */ private String providerName; /** * copyrite * * @parameter default-value="%copyright" */ private String copyright; /** * licence url * * @parameter default-value="%licenseURL" */ private String licenceUrl; /** * licence * * @parameter default-value="%license" */ private String licence; /** * path to manifest file * * @parameter */ private File manifest; /** * path to properties file * * @parameter */ private File propertiesFile; /** * list of properties * precedance over propertiesFile * * @parameter */ private Properties properties; /** * Collection of bundles * * @parameter */ private ArrayList bundles; /** * Collection of import bundles * * @parameter */ private ArrayList importBundles; /** * Collection of required Features * * @parameter */ private ArrayList importFeatures; /** * Collection of required Features * * @parameter */ private ArrayList includedFeatures; /** * define advice file content * * @parameter */ private AdviceFile adviceFile; // /** // * define category // * @parameter [alias="carbonCategories"] // */ // private String category; // /** * @component */ private org.apache.maven.artifact.factory.ArtifactFactory artifactFactory; /** * @component */ private org.apache.maven.artifact.resolver.ArtifactResolver resolver; /** * @parameter default-value="${localRepository}" */ private org.apache.maven.artifact.repository.ArtifactRepository localRepository; /** * @parameter 
default-value="${project.remoteArtifactRepositories}" */ private java.util.List remoteRepositories; /** * @parameter default-value="${project.distributionManagementArtifactRepository}" */ private ArtifactRepository deploymentRepository; /** * @component */ private ArtifactMetadataSource artifactMetadataSource; /** * @parameter default-value="${project}" */ private MavenProject project; /** * Maven ProjectHelper. * * @component */ private MavenProjectHelper projectHelper; private ArrayList<Bundle> processedBundles; private ArrayList<ImportBundle> processedImportBundles; private ArrayList<ImportFeature> processedImportfeatures; private ArrayList<Property> processedAdviceProperties; private ArrayList<IncludedFeature> processedIncludedFeatures; private File destFolder; private File featureBaseDir; private File featuresDir; private File FOLDER_FEATURES_FEATURE; private File pluginsDir; private File FOLDER_RESOURCES; private File FILE_FEATURE_XML; private File FILE_P2_INF; private File FILE_FEATURE_PROPERTIES; private File FILE_FEATURE_MANIFEST; private File FILE_FEATURE_ZIP; private boolean isPropertiesLoadedFromFile = false; public void execute() throws MojoExecutionException, MojoFailureException { getProcessedBundlesList(); getProcessedImportBundlesList(); getProcessedImportFeaturesList(); getProcessedAdviceProperties(); createAndSetupPaths(); copyResources(); createFeatureXml(); createPropertiesFile(); createManifestMFFile(); createP2Inf(); copyAllDependencies(); createArchive(); deployArtifact(); performMopUp(); } public void setVersion(String version) { this.version = version; } public String getVersion() { return version; } public void setLabel(String label) { this.label = label; } public String getLabel() { return label; } public void setDescription(String description) { this.description = description; } public String getDescription() { return description; } private ArrayList<Bundle> getProcessedBundlesList() throws MojoExecutionException { if (processedBundles 
!= null) return processedBundles; if (bundles == null || bundles.size() == 0) return null; processedBundles = new ArrayList<Bundle>(); Iterator iter = bundles.iterator(); while (iter.hasNext()) { Object obj = iter.next(); Bundle b; if (obj instanceof Bundle) { b = (Bundle) obj; } else if (obj instanceof String) { b = Bundle.getBundle(obj.toString()); } else b = (Bundle) obj; b.resolveVersion(project); b.setArtifact(getResolvedArtifact(b)); processedBundles.add(b); } return processedBundles; } private ArrayList<ImportBundle> getProcessedImportBundlesList() throws MojoExecutionException { if (processedImportBundles != null) return processedImportBundles; if (importBundles == null || importBundles.size() == 0) return null; processedImportBundles = new ArrayList<ImportBundle>(); Iterator iter = importBundles.iterator(); while (iter.hasNext()) { Object obj = iter.next(); ImportBundle b; if (obj instanceof ImportBundle) { b = (ImportBundle) obj; } else if (obj instanceof String) { b = ImportBundle.getBundle(obj.toString()); } else b = (ImportBundle) obj; b.resolveVersion(project); if (!b.isExclude()) b.setArtifact(getResolvedArtifact(b)); else b.resolveOSGIInfo(); processedImportBundles.add(b); } return processedImportBundles; } private ArrayList<ImportFeature> getProcessedImportFeaturesList() throws MojoExecutionException { if (processedImportfeatures != null) return processedImportfeatures; if (importFeatures == null || importFeatures.size() == 0) return null; processedImportfeatures = new ArrayList<ImportFeature>(); Iterator iter = importFeatures.iterator(); while (iter.hasNext()) { Object obj = iter.next(); ImportFeature f; if (obj instanceof ImportFeature) { f = (ImportFeature) obj; } else if (obj instanceof String) { f = ImportFeature.getFeature(obj.toString()); } else f = (ImportFeature) obj; f.setFeatureVersion(project.getVersion()); processedImportfeatures.add(f); } return processedImportfeatures; } private ArrayList<IncludedFeature> getIncludedFeatures() throws 
MojoExecutionException { if (processedIncludedFeatures != null) return processedIncludedFeatures; if (includedFeatures == null || includedFeatures.size() == 0) return null; processedIncludedFeatures = new ArrayList<IncludedFeature>(includedFeatures.size()); for (Object obj : includedFeatures) { if (obj instanceof String) { IncludedFeature includedFeature = IncludedFeature.getIncludedFeature((String) obj); if (includedFeature != null) { includedFeature.setFeatureVersion(project.getVersion()); Artifact artifact = artifactFactory.createArtifact(includedFeature.getGroupId(), includedFeature.getArtifactId(), includedFeature.getArtifactVersion(), Artifact.SCOPE_RUNTIME, "zip"); includedFeature.setArtifact(MavenUtils.getResolvedArtifact(artifact, remoteRepositories, localRepository, resolver)); processedIncludedFeatures.add(includedFeature); } } } return processedIncludedFeatures; } private Artifact getResolvedArtifact(Bundle bundle) throws MojoExecutionException { Artifact artifact = artifactFactory.createArtifact(bundle.getGroupId(), bundle.getArtifactId(), bundle.getVersion(), Artifact.SCOPE_RUNTIME, "jar"); try { resolver.resolve(artifact, remoteRepositories, localRepository); } catch (ArtifactResolutionException e) { throw new MojoExecutionException("ERROR", e); } catch (ArtifactNotFoundException e) { throw new MojoExecutionException("ERROR", e); } return artifact; } private void createAndSetupPaths() { destFolder = new File(project.getBasedir(), "target"); featureBaseDir = new File(destFolder, "raw"); featuresDir = new File(featureBaseDir, "features"); FOLDER_FEATURES_FEATURE = new File(featuresDir, id + "_" + Bundle.getOSGIVersion(getVersion())); pluginsDir = new File(featureBaseDir, "plugins"); FOLDER_RESOURCES = new File(project.getBasedir(), "src"); File FOLDER_FEATURES_FEATURE_META_INF = new File(FOLDER_FEATURES_FEATURE, "META-INF"); FILE_FEATURE_XML = new File(FOLDER_FEATURES_FEATURE, "feature.xml"); FILE_FEATURE_PROPERTIES = new File(FOLDER_FEATURES_FEATURE, 
"feature.properties");
        FILE_P2_INF = new File(FOLDER_FEATURES_FEATURE, "p2.inf");
        FILE_FEATURE_MANIFEST = new File(FOLDER_FEATURES_FEATURE_META_INF, "MANIFEST.MF");
        // Final artifact: <artifactId>-<version>.zip in the destination folder.
        FILE_FEATURE_ZIP = new File(destFolder, project.getArtifactId() + "-" + project.getVersion() + ".zip");
        FOLDER_FEATURES_FEATURE_META_INF.mkdirs();
        pluginsDir.mkdirs();
    }

    /**
     * Loads the user-supplied feature manifest (feature.xml) into a DOM document,
     * or returns a fresh empty document when no manifest file is configured or present.
     *
     * @return never-null DOM document to be filled in by {@link #createFeatureXml()}
     * @throws MojoExecutionException if the parser cannot be created or the file cannot be parsed
     */
    private Document getManifestDocument() throws MojoExecutionException {
        DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
        DocumentBuilder documentBuilder;
        try {
            documentBuilder = documentBuilderFactory.newDocumentBuilder();
        } catch (ParserConfigurationException e1) {
            throw new MojoExecutionException("Unable to load feature manifest", e1);
        }
        Document document;
        if (getManifest() != null && getManifest().exists()) {
            try {
                // NOTE(review): the FileInputStream opened here is never explicitly
                // closed — consider try-with-resources.
                document = documentBuilder.parse(new FileInputStream(getManifest()));
            } catch (Exception e) {
                throw new MojoExecutionException("Unable to load feature manifest", e);
            }
        } else
            document = documentBuilder.newDocument();
        return document;
    }

    /**
     * Generates (or augments) the Eclipse feature.xml manifest.
     * <p>
     * Existing attributes/elements in a user-supplied manifest win; only missing
     * pieces (id, label, version, provider-name, description, copyright, license,
     * plugin entries, import requirements, included features) are filled in from
     * the mojo configuration. The result is serialized to {@code FILE_FEATURE_XML}.
     *
     * @throws MojoExecutionException on any parse/transform failure
     */
    private void createFeatureXml() throws MojoExecutionException {
        getLog().info("Generating feature manifest");
        Document document = getManifestDocument();
        Element rootElement = document.getDocumentElement();
        if (rootElement == null) {
            // Brand-new manifest: create the <feature> root.
            rootElement = document.createElement("feature");
            document.appendChild(rootElement);
        }
        // Only set attributes the user has not already provided.
        if (!rootElement.hasAttribute("id"))
            rootElement.setAttribute("id", id);
        if (!rootElement.hasAttribute("label"))
            rootElement.setAttribute("label", getLabel());
        if (!rootElement.hasAttribute("version"))
            rootElement.setAttribute("version", Bundle.getOSGIVersion(getVersion()));
        if (!rootElement.hasAttribute("provider-name"))
            rootElement.setAttribute("provider-name", getProviderName());
        NodeList descriptionTags = rootElement.getElementsByTagName("description");
        Node description;
        if (descriptionTags.getLength() == 0) {
            description = document.createElement("description");
            description.setTextContent(getDescription());
            rootElement.appendChild(description);
        } else
            description = descriptionTags.item(0);
        NodeList copyrightTags = rootElement.getElementsByTagName("copyright");
        Node copyright;
        if (copyrightTags.getLength() == 0) {
            copyright = document.createElement("copyright");
            copyright.setTextContent(getCopyright());
            rootElement.appendChild(copyright);
        } else
            copyright = copyrightTags.item(0);
        NodeList licenseTags = rootElement.getElementsByTagName("license");
        Node license;
        if (licenseTags.getLength() == 0) {
            license = document.createElement("license");
            ((Element) license).setAttribute("url", getLicenceUrl());
            license.setTextContent(getLicence());
            rootElement.appendChild(license);
        } else
            license = licenseTags.item(0);
        // Compute what is declared in the POM but not yet present in the manifest.
        ArrayList<Object> processedMissingPlugins = getMissingPlugins(document);
        ArrayList<Object> processedMissingImportPlugins = getMissingImportPlugins(document);
        ArrayList<Object> processedMissingImportFeatures = getMissingImportFeatures(document);
        ArrayList<IncludedFeature> includedFeatures = getIncludedFeatures();
        if (processedMissingPlugins != null) {
            for (Iterator<Object> iterator = processedMissingPlugins.iterator(); iterator.hasNext();) {
                Bundle bundle = (Bundle) iterator.next();
                Element plugin = document.createElement("plugin");
                plugin.setAttribute("id", bundle.getBundleSymbolicName());
                plugin.setAttribute("version", bundle.getBundleVersion());
                plugin.setAttribute("unpack", "false");
                rootElement.appendChild(plugin);
            }
        }
        if (processedMissingImportPlugins != null || processedMissingImportFeatures != null) {
            // Imports live under a single <require> element; reuse it when present.
            NodeList requireNodes = document.getElementsByTagName("require");
            Node require;
            if (requireNodes == null || requireNodes.getLength() == 0) {
                require = document.createElement("require");
                rootElement.appendChild(require);
            } else
                require = requireNodes.item(0);
            if (processedMissingImportPlugins != null) {
                for (Iterator<Object> iterator = processedMissingImportPlugins.iterator(); iterator.hasNext();) {
                    ImportBundle bundle = (ImportBundle) iterator.next();
                    Element plugin = document.createElement("import");
                    plugin.setAttribute("plugin", bundle.getBundleSymbolicName());
                    plugin.setAttribute("version", bundle.getBundleVersion());
                    plugin.setAttribute("match", P2Utils.getMatchRule(bundle.getCompatibility()));
                    require.appendChild(plugin);
                }
            }
            if (processedMissingImportFeatures != null) {
                for (Object processedMissingImportFeature : processedMissingImportFeatures) {
                    ImportFeature feature = (ImportFeature) processedMissingImportFeature;
                    // Mandatory feature imports become <import>; optional ones are
                    // handled below as <includes> entries instead.
                    if (!feature.isOptional()) {
                        Element plugin = document.createElement("import");
                        plugin.setAttribute("feature", feature.getFeatureId());
                        plugin.setAttribute("version", feature.getFeatureVersion());
                        if (P2Utils.isPatch(feature.getCompatibility()))
                            plugin.setAttribute("patch", "true");
                        else
                            plugin.setAttribute("match", P2Utils.getMatchRule(feature.getCompatibility()));
                        require.appendChild(plugin);
                    }
                }
            }
        }
        if (includedFeatures != null) {
            for (IncludedFeature includedFeature : includedFeatures) {
                Element includeElement = document.createElement("includes");
                includeElement.setAttribute("id", includedFeature.getFeatureID());
                includeElement.setAttribute("version", includedFeature.getFeatureVersion());
                includeElement.setAttribute("optional", Boolean.toString(includedFeature.isOptional()));
                rootElement.appendChild(includeElement);
            }
        }
        if (processedMissingImportFeatures != null) {
            for (Object processedMissingImportFeature : processedMissingImportFeatures) {
                ImportFeature feature = (ImportFeature) processedMissingImportFeature;
                // Optional imported features are expressed as optional <includes>.
                if (feature.isOptional()) {
                    Element includeElement = document.createElement("includes");
                    includeElement.setAttribute("id", feature.getFeatureId());
                    includeElement.setAttribute("version", feature.getFeatureVersion());
                    includeElement.setAttribute("optional", Boolean.toString(feature.isOptional()));
                    rootElement.appendChild(includeElement);
                }
            }
        }
        try {
            // Serialize the DOM to feature.xml, pretty-printed with 2-space indent.
            TransformerFactory transformerFactory = TransformerFactory.newInstance();
            Transformer transformer;
            transformer = transformerFactory.newTransformer();
            DOMSource source = new DOMSource(document);
            StreamResult result = new StreamResult(FILE_FEATURE_XML);
            transformer.setOutputProperty(OutputKeys.INDENT, "yes");
            transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2");
            transformer.transform(source, result);
        } catch (Exception e) {
            throw new MojoExecutionException("Unable to create feature manifest", e);
        }
    }

    /**
     * Returns the configured bundles that do not yet appear as {@code <plugin>}
     * entries (matched by the "id" attribute against the bundle's artifactId)
     * in the manifest, or {@code null} when no bundles are configured.
     */
    private ArrayList<Object> getMissingPlugins(Document document) throws MojoExecutionException {
        HashMap<String, Bundle> missingPlugins = new HashMap<String, Bundle>();
        ArrayList<Bundle> processedBundlesList = getProcessedBundlesList();
        if (processedBundlesList == null)
            return null;
        for (Iterator<Bundle> iterator = processedBundlesList.iterator(); iterator
                .hasNext();) {
            Bundle bundle = iterator.next();
            missingPlugins.put(bundle.getArtifactId(), bundle);
        }
        NodeList existingPlugins = document.getDocumentElement().getElementsByTagName("plugin");
        for (int i = 0; i < existingPlugins.getLength(); i++) {
            Node node = existingPlugins.item(i);
            Node namedItem = node.getAttributes().getNamedItem("id");
            if (namedItem != null && namedItem.getTextContent() != null
                    && missingPlugins.containsKey(namedItem.getTextContent())) {
                missingPlugins.remove(namedItem.getTextContent());
            }
        }
        return returnArrayList(missingPlugins.values().toArray());
    }

    /**
     * Writes the feature.properties file when any properties are configured.
     * A null properties set is a no-op.
     */
    private void createPropertiesFile() throws MojoExecutionException {
        Properties props = getProperties();
        if (props == null)
            return;
        if (!props.isEmpty())
            try {
                getLog().info("Generating feature properties");
                // NOTE(review): FileOutputStream is never closed — consider
                // try-with-resources here.
                props.store(new FileOutputStream(FILE_FEATURE_PROPERTIES), "Properties of " + id);
            } catch (Exception e) {
                throw new MojoExecutionException("Unable to create the feature properties", e);
            }
    }

    /**
     * Writes a minimal META-INF/MANIFEST.MF for the feature jar.
     */
    private void createManifestMFFile() throws MojoExecutionException {
        try {
            getLog().info("Generating MANIFEST.MF");
            // NOTE(review): writer is not closed if write() throws —
            // try-with-resources would be safer.
            BufferedWriter out = new BufferedWriter(new FileWriter(FILE_FEATURE_MANIFEST));
            out.write("Manifest-Version: 1.0\n\n");
            out.close();
        } catch (Exception e) { // Catch exception if any
            throw new MojoExecutionException("Unable to create manifest file", e);
        }
    }

    /**
     * Creates or updates the p2 advice file (p2.inf).
     * <p>
     * Existing content is read into memory first, then rewritten with
     * {@code ${feature.version}} placeholders substituted; configured advice
     * properties are appended after the highest existing properties.N index.
     */
    private void createP2Inf() throws MojoExecutionException {
        BufferedWriter out = null;
        List<String> p2infStringList = null;
        try {
            ArrayList<Property> list = getProcessedAdviceProperties();
            if (FILE_P2_INF.exists()) {
                // In-memory snapshot of the current p2.inf content before we rewrite it.
                p2infStringList = readAdviceFile(FILE_P2_INF.getAbsolutePath());
                getLog().info("Updating Advice file (p2.inf)");
            } else {
                getLog().info("Generating Advice file (p2.inf)");
            }
            out = new BufferedWriter(new FileWriter(FILE_P2_INF.getAbsolutePath()));
            // Re-write the previously available p2.inf lines, substituting
            // ${feature.version} with the OSGi-formatted feature version.
            Properties properties = new Properties();
            properties.setProperty("feature.version", Bundle.getOSGIVersion(getVersion()));
            if (p2infStringList != null && p2infStringList.size() > 0) {
                for (String str : p2infStringList) {
                    out.write(PropertyReplacer.replaceProperties(str, properties) + "\n");
                }
            }
            if (list.size() == 0)
                return; // finally block will take care of output stream closing.
            // Continue numbering after the last properties.N entry already present.
            int nextIndex = P2Utils.getLastIndexOfProperties(FILE_P2_INF) + 1;
            for (Object category : list) {
                Property cat = (Property) category;
                out.write("\nproperties." + nextIndex + ".name=" + cat.getKey());
                out.write("\nproperties." + nextIndex + ".value=" + cat.getValue());
                nextIndex++;
            }
        } catch (Exception e) {
            throw new MojoExecutionException("Unable to create/open p2.inf file", e);
        } finally {
            if (out != null)
                try {
                    out.close();
                } catch (IOException e) {
                    throw new MojoExecutionException("Unable to finalize p2.inf file", e);
                }
        }
    }

    /**
     * Reads the advice file line-by-line into a list.
     *
     * @param absolutePath path of the p2.inf file to read
     * @return the file's lines, in order (never null)
     */
    private List<String> readAdviceFile(String absolutePath) throws MojoExecutionException {
        List<String> stringList = new ArrayList<String>();
        String inputLine = null;
        BufferedReader br = null;
        try {
            br = new BufferedReader(new FileReader(absolutePath));
            while ((inputLine = br.readLine()) != null) {
                stringList.add(inputLine);
            }
            // NOTE(review): redundant close — the finally block below closes br again
            // (harmless for BufferedReader, but one of the two could be dropped).
            br.close();
        } catch (FileNotFoundException e) {
            throw new MojoExecutionException("Unable to create/open p2.inf file", e);
        } catch (IOException e) {
            throw new MojoExecutionException("Error while reading from p2.inf file", e);
        } finally {
            if (br != null) {
                try {
                    br.close();
                } catch (IOException e) {
                    throw new MojoExecutionException("Unable to finalize p2.inf file", e);
                }
            }
        }
        return stringList;
    }

    /**
     * Returns the configured import bundles not yet present as
     * {@code <require>/<import plugin="...">} entries in the manifest,
     * or {@code null} when no import bundles are configured.
     */
    private ArrayList<Object> getMissingImportPlugins(Document document) throws MojoExecutionException {
        HashMap<String, ImportBundle> missingImportPlugins = new HashMap<String, ImportBundle>();
        ArrayList<ImportBundle> processedImportBundlesList = getProcessedImportBundlesList();
        if (processedImportBundlesList == null)
            return null;
        for (Iterator<ImportBundle> iterator = processedImportBundlesList.iterator(); iterator.hasNext();) {
            ImportBundle bundle = iterator.next();
            missingImportPlugins.put(bundle.getArtifactId(), bundle);
        }
        NodeList requireNodeList = document.getDocumentElement().getElementsByTagName("require");
        if (requireNodeList == null || requireNodeList.getLength() == 0)
            return returnArrayList(missingImportPlugins.values().toArray());
        Node requireNode = requireNodeList.item(0);
        if (requireNode instanceof Element) {
            Element requireElement = (Element) requireNode;
            NodeList importNodes = requireElement.getElementsByTagName("import");
            if (importNodes == null)
                return returnArrayList(missingImportPlugins.values().toArray());
            for (int i = 0; i < importNodes.getLength(); i++) {
                Node node = importNodes.item(i);
                Node namedItem = node.getAttributes().getNamedItem("plugin");
                if (namedItem != null && namedItem.getTextContent() != null
                        && missingImportPlugins.containsKey(namedItem.getTextContent())) {
                    missingImportPlugins.remove(namedItem.getTextContent());
                }
            }
        }
        return returnArrayList(missingImportPlugins.values().toArray());
    }

    /**
     * Returns the configured import features not yet present as
     * {@code <require>/<import feature="...">} entries in the manifest,
     * or {@code null} when no import features are configured.
     * Mirrors {@link #getMissingImportPlugins} but keys on the "feature" attribute.
     */
    private ArrayList<Object> getMissingImportFeatures(Document document) throws MojoExecutionException {
        HashMap<String, ImportFeature> missingImportFeatures = new HashMap<String, ImportFeature>();
        ArrayList<ImportFeature> processedImportFeaturesList = getProcessedImportFeaturesList();
        if (processedImportFeaturesList == null)
            return null;
        for (Iterator<ImportFeature> iterator = processedImportFeaturesList.iterator(); iterator.hasNext();) {
            ImportFeature feature = iterator.next();
            missingImportFeatures.put(feature.getFeatureId(), feature);
        }
        NodeList requireNodeList = document.getDocumentElement().getElementsByTagName("require");
        if (requireNodeList == null || requireNodeList.getLength() == 0)
            return returnArrayList(missingImportFeatures.values().toArray());
        Node requireNode = requireNodeList.item(0);
        if (requireNode instanceof Element) {
            Element requireElement = (Element) requireNode;
            NodeList importNodes = requireElement.getElementsByTagName("import");
            if (importNodes == null)
                return returnArrayList(missingImportFeatures.values().toArray());
            for (int i = 0; i < importNodes.getLength(); i++) {
                Node node = importNodes.item(i);
                Node namedItem = node.getAttributes().getNamedItem("feature");
                if (namedItem != null && namedItem.getTextContent() != null
                        && missingImportFeatures.containsKey(namedItem.getTextContent())) {
                    missingImportFeatures.remove(namedItem.getTextContent());
                }
            }
        }
        return returnArrayList(missingImportFeatures.values().toArray());
    }

    /** Copies an array into a new ArrayList (pre-generics-friendly helper). */
    private ArrayList<Object> returnArrayList(Object[] arr) {
        ArrayList<Object> arrayList = new ArrayList<Object>();
        for (Object object : arr) {
            arrayList.add(object);
        }
        return arrayList;
    }

    public void setProviderName(String providerName) {
        this.providerName = providerName;
    }

    public String getProviderName() {
        return providerName;
    }

    public void setCopyright(String copyrite) {
        this.copyright = copyrite;
    }

    public String getCopyright() {
        return copyright;
    }

    public void setLicenceUrl(String licenceUrl) {
        this.licenceUrl = licenceUrl;
    }

    public String getLicenceUrl() {
        return licenceUrl;
    }

    public void setLicence(String licence) {
        this.licence = licence;
    }

    public String getLicence() {
        return licence;
    }

    public void setManifest(File manifest) {
        this.manifest = manifest;
    }

    public File getManifest() {
        return manifest;
    }

    public void setPropertiesFile(File propertiesFile) {
        this.propertiesFile = propertiesFile;
    }

    public File getPropertiesFile() {
        return propertiesFile;
    }

    public void setProperties(Properties properties) {
        this.properties = properties;
    }

    /**
     * Lazily merges the properties file (if configured and present) with the
     * directly-configured properties; directly-configured values win.
     * The merge happens at most once, guarded by {@code isPropertiesLoadedFromFile}.
     */
    public Properties getProperties() throws MojoExecutionException {
        if (!isPropertiesLoadedFromFile) {
            isPropertiesLoadedFromFile = true;
            if (getPropertiesFile() != null && getPropertiesFile().exists()) {
                Properties props = new Properties();
                try {
                    // NOTE(review): FileInputStream is never closed — consider
                    // try-with-resources.
                    props.load(new FileInputStream(getPropertiesFile()));
                } catch (Exception e) {
                    throw new MojoExecutionException("Unable to load the given properties file", e);
                }
                if (properties != null) {
                    for (Object key : properties.keySet().toArray()) {
                        props.setProperty(key.toString(), properties.getProperty(key.toString()));
                    }
                }
                setProperties(props);
            }
        }
        return properties;
    }

    /**
     * Normalizes the configured advice properties (either Property objects or
     * "key=value" strings) into a cached list of Property instances.
     *
     * @throws MojoExecutionException for any entry that is neither a Property nor a String
     */
    private ArrayList<Property> getProcessedAdviceProperties() throws MojoExecutionException {
        if (processedAdviceProperties != null)
            return processedAdviceProperties;
        processedAdviceProperties = new ArrayList<Property>();
        ;
        if (adviceFile != null && adviceFile.getProperties() != null) {
            for (Object property : adviceFile.getProperties()) {
                Property prop = null;
                if (property instanceof Property)
                    prop = (Property) property;
                else if (property instanceof String)
                    prop = Property.getProperty(property.toString());
                else
                    throw new MojoExecutionException("Unknown advice property definition: " + property.toString());
                processedAdviceProperties.add(prop);
            }
        }
        return processedAdviceProperties;
    }

    /**
     * Copies all dependency jars into the plugins directory and extracts
     * included feature archives into the feature base directory.
     * Import bundles marked as excluded are skipped.
     */
    private void copyAllDependencies() throws MojoExecutionException {
        ArrayList<Bundle> processedBundlesList = getProcessedBundlesList();
        if (processedBundlesList != null) {
            getLog().info("Copying bundle dependencies");
            for (Iterator<Bundle> iterator = processedBundlesList.iterator(); iterator.hasNext();) {
                Bundle bundle = iterator.next();
                try {
                    getLog().info(" " + bundle.toOSGIString());
                    String bundleName = bundle.getBundleSymbolicName() + "-" + bundle.getBundleVersion() + ".jar";
                    FileUtils.copyFile(bundle.getArtifact().getFile(), new File(pluginsDir, bundleName));
                } catch (IOException e) {
                    throw new MojoExecutionException("Unable copy dependency: " + bundle.getArtifactId(), e);
                }
            }
        }
        ArrayList<ImportBundle> processedImportBundlesList = getProcessedImportBundlesList();
        if (processedImportBundlesList != null) {
            getLog().info("Copying import bundle dependencies");
            for (Iterator<ImportBundle> iterator = processedImportBundlesList.iterator(); iterator.hasNext();) {
                ImportBundle bundle = iterator.next();
                try {
                    if (!bundle.isExclude()) {
                        getLog().info(" " + bundle.toOSGIString());
                        String bundleName = bundle.getBundleSymbolicName() + "-" + bundle.getBundleVersion() + ".jar";
                        FileUtils.copyFile(bundle.getArtifact().getFile(), new File(pluginsDir, bundleName));
                    }
                } catch (IOException e) {
                    throw new MojoExecutionException("Unable copy import dependency: " + bundle.getArtifactId(), e);
                }
            }
        }
        // Copying includedFeatures
        if (processedIncludedFeatures != null) {
            for (IncludedFeature includedFeature : processedIncludedFeatures) {
                try {
                    getLog().info("Extracting feature " + includedFeature.getGroupId() + ":"
                            + includedFeature.getArtifactId());
                    FileManagementUtil.unzip(includedFeature.getArtifact().getFile(), featureBaseDir);
                } catch (Exception e) {
                    throw new MojoExecutionException("Error occured when extracting the Feature Artifact: "
                            + includedFeature.getGroupId() + ":" + includedFeature.getArtifactId(), e);
                }
            }
        }
    }

    /** Zips the assembled feature directory into the final artifact. */
    private void createArchive() throws MojoExecutionException {
        getLog().info("Generating feature archive: " + FILE_FEATURE_ZIP.getAbsolutePath());
        FileManagementUtil.zipFolder(featureBaseDir.getAbsolutePath(), FILE_FEATURE_ZIP.getAbsolutePath());
    }

    /** Attaches the generated zip to the Maven project as a "zip" artifact. */
    private void deployArtifact() {
        if (FILE_FEATURE_ZIP != null && FILE_FEATURE_ZIP.exists()) {
            project.getArtifact().setFile(FILE_FEATURE_ZIP);
            projectHelper.attachArtifact(project, "zip", null, FILE_FEATURE_ZIP);
        }
    }

    /**
     * Copies project resources into the feature folder, honoring each
     * resource's include/exclude patterns plus the default excludes.
     * (The following code was taken from the maven bundle plugin and updated
     * to suit this purpose.)
     */
    private void copyResources() throws MojoExecutionException {
        List<Resource> resources = project.getResources();
        for (Resource resource : resources) {
            String sourcePath = resource.getDirectory();
            if (new File(sourcePath).exists()) {
                DirectoryScanner scanner = new DirectoryScanner();
                scanner.setBasedir(resource.getDirectory());
                if (resource.getIncludes() != null && !resource.getIncludes().isEmpty()) {
                    scanner.setIncludes((String[]) resource.getIncludes().toArray(new String[]{}));
                } else {
                    // No explicit includes: copy everything.
                    scanner.setIncludes(new String[]{"**/**"});
                }
                List<String> excludes = resource.getExcludes();
                if (excludes != null && !excludes.isEmpty()) {
                    scanner.setExcludes((String[]) excludes.toArray(new String[]{}));
                }
                scanner.addDefaultExcludes();
                scanner.scan();
                List<String> includedFiles = Arrays.asList(scanner.getIncludedFiles());
                getLog().info(" " + resource.getDirectory());
                for (String name : includedFiles) {
                    File fromPath = new File(sourcePath, name);
                    File toPath = new File(FOLDER_FEATURES_FEATURE, name);
                    try {
                        if (fromPath.isDirectory() && !toPath.exists()) {
                            toPath.mkdirs();
                        } else {
                            FileManagementUtil.copy(fromPath, toPath);
                        }
                    } catch (IOException e) {
                        throw new MojoExecutionException("Unable copy resources: " + resource.getDirectory(), e);
                    }
                }
            }
        }
        // (A superseded whole-directory-copy implementation was previously kept
        // here as commented-out code; removed — see version control history.)
    }

    /** Best-effort cleanup of the staging directory; failures are only logged. */
    private void performMopUp() {
        try {
            FileUtils.deleteDirectory(featureBaseDir);
        } catch (Exception e) {
            // Deliberately non-fatal: the artifact has already been produced.
            getLog().warn(new MojoExecutionException("Unable complete mop up operation", e));
        }
    }
}
package net.piemaster.jario.systems.handling;

import net.piemaster.jario.components.Acceleration;
import net.piemaster.jario.components.Audible;
import net.piemaster.jario.components.CollisionMesh;
import net.piemaster.jario.components.Health;
import net.piemaster.jario.components.Item;
import net.piemaster.jario.components.Jumping;
import net.piemaster.jario.components.Physical;
import net.piemaster.jario.components.SpatialForm;
import net.piemaster.jario.components.Transform;
import net.piemaster.jario.components.Velocity;
import net.piemaster.jario.entities.EntityType;
import net.piemaster.jario.systems.CollisionSystem.EdgeType;
import net.piemaster.jario.systems.handling.utils.CollisionCommand;

import com.artemis.Component;
import com.artemis.ComponentMapper;
import com.artemis.Entity;

/**
 * Base collision-handling system that wires up component mappers and registers
 * a no-op handler for every {@link EntityType}. Subclasses override only the
 * {@code handle*Collision} hooks they care about; everything else stays a no-op.
 */
public abstract class EmptyHandlingSystem extends PersistentEntityHandlingSystem
{
	// Convenience mappers, initialized in initialize(); available to subclasses.
	protected ComponentMapper<Transform> transformMapper;
	protected ComponentMapper<Velocity> velocityMapper;
	protected ComponentMapper<Acceleration> accelMapper;
	protected ComponentMapper<Physical> physicalMapper;
	protected ComponentMapper<Health> healthMapper;
	protected ComponentMapper<SpatialForm> spatialMapper;
	protected ComponentMapper<CollisionMesh> meshMapper;
	protected ComponentMapper<Item> itemMapper;
	protected ComponentMapper<Jumping> jumpMapper;
	protected ComponentMapper<Audible> audibleMapper;

	/**
	 * Extension constructor.
	 */
	public EmptyHandlingSystem(Class<? extends Component> requiredType,
			Class<? extends Component>... otherTypes)
	{
		super(requiredType, otherTypes);
	}

	/**
	 * Initialise the mappers and register the empty handlers.
	 */
	@Override
	public void initialize()
	{
		super.initialize();

		// Create component mappers
		transformMapper = new ComponentMapper<Transform>(Transform.class, world);
		velocityMapper = new ComponentMapper<Velocity>(Velocity.class, world);
		accelMapper = new ComponentMapper<Acceleration>(Acceleration.class, world);
		physicalMapper = new ComponentMapper<Physical>(Physical.class, world);
		healthMapper = new ComponentMapper<Health>(Health.class, world);
		spatialMapper = new ComponentMapper<SpatialForm>(SpatialForm.class, world);
		meshMapper = new ComponentMapper<CollisionMesh>(CollisionMesh.class, world);
		itemMapper = new ComponentMapper<Item>(Item.class, world);
		jumpMapper = new ComponentMapper<Jumping>(Jumping.class, world);
		audibleMapper = new ComponentMapper<Audible>(Audible.class, world);

		// Register collision handlers: each entity type dispatches to the
		// corresponding overridable hook below.
		handlers.put(EntityType.TERRAIN.toString(), new CollisionCommand()
		{
			@Override
			public void handle(Entity source, Entity target, EdgeType edge)
			{
				handleTerrainCollision(source, target, edge);
			}
		});
		handlers.put(EntityType.ENEMY.toString(), new CollisionCommand()
		{
			@Override
			public void handle(Entity source, Entity target, EdgeType edge)
			{
				handleEnemyCollision(source, target, edge);
			}
		});
		handlers.put(EntityType.ITEM.toString(), new CollisionCommand()
		{
			@Override
			public void handle(Entity source, Entity target, EdgeType edge)
			{
				handleItemCollision(source, target, edge);
			}
		});
		handlers.put(EntityType.ITEMBOX.toString(), new CollisionCommand()
		{
			@Override
			public void handle(Entity source, Entity target, EdgeType edge)
			{
				handleItemBoxCollision(source, target, edge);
			}
		});
		handlers.put(EntityType.WEAPON.toString(), new CollisionCommand()
		{
			@Override
			public void handle(Entity source, Entity target, EdgeType edge)
			{
				handleWeaponCollision(source, target, edge);
			}
		});
		handlers.put(EntityType.PLAYER.toString(), new CollisionCommand()
		{
			@Override
			public void handle(Entity source, Entity target, EdgeType edge)
			{
				handlePlayerCollision(source, target, edge);
			}
		});
	}

	// -------------------------------------------------------------------------
	// Empty collision handler implementations/hooks
	// -------------------------------------------------------------------------

	/** Hook: collision with terrain. No-op by default. */
	protected void handleTerrainCollision(Entity source, Entity target, EdgeType edge)
	{
	}

	/** Hook: collision with an enemy. No-op by default. */
	protected void handleEnemyCollision(Entity source, Entity target, EdgeType edge)
	{
	}

	/** Hook: collision with an item. No-op by default. */
	protected void handleItemCollision(Entity source, Entity target, EdgeType edge)
	{
	}

	/** Hook: collision with an item box. No-op by default. */
	protected void handleItemBoxCollision(Entity source, Entity target, EdgeType edge)
	{
	}

	/** Hook: collision with a weapon. No-op by default. */
	protected void handleWeaponCollision(Entity source, Entity target, EdgeType edge)
	{
	}

	/** Hook: collision with a player. No-op by default. */
	protected void handlePlayerCollision(Entity source, Entity target, EdgeType edge)
	{
	}

	// -------------------------------------------------------------------------
	// Utility methods
	// -------------------------------------------------------------------------

	/**
	 * Set position and stop relevant movement.
	 * <p>
	 * Snaps e1 flush against e2 on the given collision edge (the edge of e1
	 * that made contact), then either bounces or halts its velocity depending
	 * on the Physical component's bounciness flags.
	 */
	public void placeEntityOnOther(Entity e1, Entity e2, EdgeType edge)
	{
		CollisionMesh m1 = meshMapper.get(e1);
		CollisionMesh m2 = meshMapper.get(e2);
		Transform t1 = transformMapper.get(e1);
		Velocity v1 = velocityMapper.get(e1);
		Acceleration a1 = accelMapper.get(e1);
		Physical phys = physicalMapper.get(e1);
		Jumping jump = jumpMapper.get(e1);

		if (edge == EdgeType.EDGE_TOP)
		{
			// Set the Y coordinate to that of the terrain object
			t1.setY(m2.getY() - m1.getHeight());

			if (phys.isBouncyVertical() && jump != null)
			{
				// Bounce: launch upward using the jump factor.
				v1.setY(-jump.getJumpFactor());
			}
			else
			{
				// Only halt downward motion; upward motion is left untouched.
				if (v1.getY() > 0)
				{
					haltVertical(e1);
				}
			}
			// Record that the actor is on the ground to avoid gravity
			phys.setGrounded(true);
		}
		else if (edge == EdgeType.EDGE_BOTTOM)
		{
			// Hit from below: place e1 just under e2 and cancel upward motion.
			t1.setY(m2.getY() + m2.getHeight());

			if (v1.getY() < 0)
			{
				haltVertical(e1);
			}
			phys.setGrounded(false);
		}
		else if (edge == EdgeType.EDGE_LEFT || edge == EdgeType.EDGE_RIGHT)
		{
			// Side collision: snap flush to the left or right face of e2.
			float diff = (edge == EdgeType.EDGE_LEFT) ? -m1.getWidth() : m2.getWidth();
			t1.setX(m2.getX() + diff);

			if (phys.isBouncyHorizontal())
			{
				// If walking INTO the placed edge
				int vDir = (int) Math.signum(v1.getX());
				int edgeDir = edge == EdgeType.EDGE_LEFT ? 1 : -1;
				if (vDir == edgeDir)
				{
					// Reverse facing, velocity, and (if present) acceleration.
					t1.setFacingRight(!t1.isFacingRight());
					v1.setX(-v1.getX());
					if (a1 != null)
						a1.reverse(true, false);
				}
			}
			else
			{
				// Intentionally NOT halting horizontal motion here (kept disabled).
				// haltHorizontal(e1);
			}
		}
	}

	/** Zeroes the entity's vertical velocity and acceleration. */
	public void haltVertical(Entity ent)
	{
		ent.getComponent(Acceleration.class).setY(0);
		ent.getComponent(Velocity.class).setY(0);
	}

	/** Zeroes the entity's horizontal velocity and acceleration. */
	public void haltHorizontal(Entity ent)
	{
		ent.getComponent(Acceleration.class).setX(0);
		ent.getComponent(Velocity.class).setX(0);
	}

	/** Returns the opposite edge (LEFT<->RIGHT, TOP<->BOTTOM), else EDGE_NONE. */
	protected EdgeType reverseEdge(EdgeType edge)
	{
		switch (edge)
		{
			case EDGE_LEFT:
				return EdgeType.EDGE_RIGHT;
			case EDGE_RIGHT:
				return EdgeType.EDGE_LEFT;
			case EDGE_TOP:
				return EdgeType.EDGE_BOTTOM;
			case EDGE_BOTTOM:
				return EdgeType.EDGE_TOP;
			default:
				return EdgeType.EDGE_NONE;
		}
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.websocket;

import org.apache.nifi.processor.Processor;
import org.apache.nifi.websocket.jetty.JettyWebSocketClient;
import org.apache.nifi.websocket.jetty.JettyWebSocketServer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;

import java.net.ServerSocket;
import java.nio.ByteBuffer;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;

/**
 * End-to-end test: starts a real Jetty WebSocket server and client on a random
 * local port and verifies connect events plus text/binary message delivery in
 * both directions. Subclasses can override {@link #isSecure()},
 * {@link #customizeServer()} and {@link #customizeClient()} to test TLS setups.
 */
public class TestJettyWebSocketCommunication {

    protected int serverPort;
    protected String serverPath = "/test";
    protected WebSocketServerService serverService;
    protected ControllerServiceTestContext serverServiceContext;
    protected WebSocketClientService clientService;
    protected ControllerServiceTestContext clientServiceContext;

    /** Override to return true in TLS variants of this test. */
    protected boolean isSecure() {
        return false;
    }

    @Before
    public void setup() throws Exception {
        setupServer();
        setupClient();
    }

    /** Starts a JettyWebSocketServer on a freshly-allocated free port. */
    private void setupServer() throws Exception {
        // Find an open port.
        try (final ServerSocket serverSocket = new ServerSocket(0)) {
            serverPort = serverSocket.getLocalPort();
        }
        serverService = new JettyWebSocketServer();
        serverServiceContext = new ControllerServiceTestContext(serverService, "JettyWebSocketServer1");
        serverServiceContext.setCustomValue(JettyWebSocketServer.LISTEN_PORT, String.valueOf(serverPort));
        customizeServer();
        serverService.initialize(serverServiceContext.getInitializationContext());
        serverService.startServer(serverServiceContext.getConfigurationContext());
    }

    /** Hook for subclasses to adjust server configuration before startup. */
    protected void customizeServer() {
    }

    /** Starts a JettyWebSocketClient pointed at the local server's test path. */
    private void setupClient() throws Exception {
        clientService = new JettyWebSocketClient();
        clientServiceContext = new ControllerServiceTestContext(clientService, "JettyWebSocketClient1");
        clientServiceContext.setCustomValue(JettyWebSocketClient.WS_URI,
                (isSecure() ? "wss" : "ws") + "://localhost:" + serverPort + serverPath);
        customizeClient();
        clientService.initialize(clientServiceContext.getInitializationContext());
        clientService.startClient(clientServiceContext.getConfigurationContext());
    }

    /** Hook for subclasses to adjust client configuration before startup. */
    protected void customizeClient() {
    }

    @After
    public void teardown() throws Exception {
        clientService.stopClient();
        serverService.stopServer();
    }

    /** Mockable composite of Processor plus all WebSocket listener interfaces. */
    protected interface MockWebSocketProcessor
            extends Processor, ConnectedListener, TextMessageConsumer, BinaryMessageConsumer {
    }

    @Test
    public void testClientServerCommunication() throws Exception {
        // Expectations: one latch per event we expect to observe.
        final CountDownLatch serverIsConnectedByClient = new CountDownLatch(1);
        final CountDownLatch clientConnectedServer = new CountDownLatch(1);
        final CountDownLatch serverReceivedTextMessageFromClient = new CountDownLatch(1);
        final CountDownLatch serverReceivedBinaryMessageFromClient = new CountDownLatch(1);
        final CountDownLatch clientReceivedTextMessageFromServer = new CountDownLatch(1);
        final CountDownLatch clientReceivedBinaryMessageFromServer = new CountDownLatch(1);

        final String textMessageFromClient = "Message from client.";
        final String textMessageFromServer = "Message from server.";

        // Mock server-side processor: counts down its latches when events arrive.
        final MockWebSocketProcessor serverProcessor = mock(MockWebSocketProcessor.class);
        doReturn("serverProcessor1").when(serverProcessor).getIdentifier();
        final AtomicReference<String> serverSessionIdRef = new AtomicReference<>();
        doAnswer(invocation -> assertConnectedEvent(serverIsConnectedByClient, serverSessionIdRef, invocation))
                .when(serverProcessor).connected(any(WebSocketSessionInfo.class));
        doAnswer(invocation -> assertConsumeTextMessage(serverReceivedTextMessageFromClient, textMessageFromClient, invocation))
                .when(serverProcessor).consume(any(WebSocketSessionInfo.class), anyString());
        doAnswer(invocation -> assertConsumeBinaryMessage(serverReceivedBinaryMessageFromClient, textMessageFromClient, invocation))
                .when(serverProcessor).consume(any(WebSocketSessionInfo.class), any(byte[].class), anyInt(), anyInt());
        serverService.registerProcessor(serverPath, serverProcessor);

        // Mock client-side processor, mirroring the server-side expectations.
        final String clientId = "client1";
        final MockWebSocketProcessor clientProcessor = mock(MockWebSocketProcessor.class);
        doReturn("clientProcessor1").when(clientProcessor).getIdentifier();
        final AtomicReference<String> clientSessionIdRef = new AtomicReference<>();
        doAnswer(invocation -> assertConnectedEvent(clientConnectedServer, clientSessionIdRef, invocation))
                .when(clientProcessor).connected(any(WebSocketSessionInfo.class));
        doAnswer(invocation -> assertConsumeTextMessage(clientReceivedTextMessageFromServer, textMessageFromServer, invocation))
                .when(clientProcessor).consume(any(WebSocketSessionInfo.class), anyString());
        doAnswer(invocation -> assertConsumeBinaryMessage(clientReceivedBinaryMessageFromServer, textMessageFromServer, invocation))
                .when(clientProcessor).consume(any(WebSocketSessionInfo.class), any(byte[].class), anyInt(), anyInt());
        clientService.registerProcessor(clientId, clientProcessor);

        clientService.connect(clientId);

        // Both sides must observe the connection within the timeout.
        assertTrue("WebSocket client should be able to fire connected event.",
                clientConnectedServer.await(5, TimeUnit.SECONDS));
        assertTrue("WebSocket server should be able to fire connected event.",
                serverIsConnectedByClient.await(5, TimeUnit.SECONDS));

        // Client -> server: one text and one binary message.
        clientService.sendMessage(clientId, clientSessionIdRef.get(),
                sender -> sender.sendString(textMessageFromClient));
        clientService.sendMessage(clientId, clientSessionIdRef.get(),
                sender -> sender.sendBinary(ByteBuffer.wrap(textMessageFromClient.getBytes())));

        assertTrue("WebSocket server should be able to consume text message.",
                serverReceivedTextMessageFromClient.await(5, TimeUnit.SECONDS));
        assertTrue("WebSocket server should be able to consume binary message.",
                serverReceivedBinaryMessageFromClient.await(5, TimeUnit.SECONDS));

        // Server -> client: one text and one binary message.
        serverService.sendMessage(serverPath, serverSessionIdRef.get(),
                sender -> sender.sendString(textMessageFromServer));
        serverService.sendMessage(serverPath, serverSessionIdRef.get(),
                sender -> sender.sendBinary(ByteBuffer.wrap(textMessageFromServer.getBytes())));

        assertTrue("WebSocket client should be able to consume text message.",
                clientReceivedTextMessageFromServer.await(5, TimeUnit.SECONDS));
        assertTrue("WebSocket client should be able to consume binary message.",
                clientReceivedBinaryMessageFromServer.await(5, TimeUnit.SECONDS));

        clientService.deregisterProcessor(clientId, clientProcessor);
        serverService.deregisterProcessor(serverPath, serverProcessor);
    }

    /**
     * Mockito answer body: validates session info on a connected event,
     * captures the session id for later sends, and counts down the latch.
     * Always returns null (the mocked method is void).
     */
    protected Object assertConnectedEvent(CountDownLatch latch, AtomicReference<String> sessionIdRef,
                                          InvocationOnMock invocation) {
        final WebSocketSessionInfo sessionInfo = invocation.getArgumentAt(0, WebSocketSessionInfo.class);
        assertNotNull(sessionInfo.getLocalAddress());
        assertNotNull(sessionInfo.getRemoteAddress());
        assertNotNull(sessionInfo.getSessionId());
        assertEquals(isSecure(), sessionInfo.isSecure());
        sessionIdRef.set(sessionInfo.getSessionId());
        latch.countDown();
        return null;
    }

    /**
     * Mockito answer body: validates session info and the received text
     * payload, then counts down the latch.
     */
    protected Object assertConsumeTextMessage(CountDownLatch latch, String expectedMessage,
                                              InvocationOnMock invocation) {
        final WebSocketSessionInfo sessionInfo = invocation.getArgumentAt(0, WebSocketSessionInfo.class);
        assertNotNull(sessionInfo.getLocalAddress());
        assertNotNull(sessionInfo.getRemoteAddress());
        assertNotNull(sessionInfo.getSessionId());
        assertEquals(isSecure(), sessionInfo.isSecure());

        final String receivedMessage = invocation.getArgumentAt(1, String.class);
        assertNotNull(receivedMessage);
        assertEquals(expectedMessage, receivedMessage);
        latch.countDown();
        return null;
    }

    /**
     * Mockito answer body: validates session info and the received binary
     * payload (content, offset 0, full length), then counts down the latch.
     */
    protected Object assertConsumeBinaryMessage(CountDownLatch latch, String expectedMessage,
                                                InvocationOnMock invocation) {
        final WebSocketSessionInfo sessionInfo = invocation.getArgumentAt(0, WebSocketSessionInfo.class);
        assertNotNull(sessionInfo.getLocalAddress());
        assertNotNull(sessionInfo.getRemoteAddress());
        assertNotNull(sessionInfo.getSessionId());
        assertEquals(isSecure(), sessionInfo.isSecure());

        final byte[] receivedMessage = invocation.getArgumentAt(1, byte[].class);
        final byte[] expectedBinary = expectedMessage.getBytes();
        final int offset = invocation.getArgumentAt(2, Integer.class);
        final int length = invocation.getArgumentAt(3, Integer.class);
        assertNotNull(receivedMessage);
        assertEquals(expectedBinary.length, receivedMessage.length);
        assertEquals(expectedMessage, new String(receivedMessage));
        assertEquals(0, offset);
        assertEquals(expectedBinary.length, length);
        latch.countDown();
        return null;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.spatial4j.core.context; import com.spatial4j.core.distance.CartesianDistCalc; import com.spatial4j.core.distance.DistanceCalculator; import com.spatial4j.core.distance.GeodesicSphereDistCalc; import com.spatial4j.core.io.BinaryCodec; import com.spatial4j.core.io.WktShapeParser; import com.spatial4j.core.shape.Rectangle; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.util.Arrays; import java.util.Map; /** * Factory for a {@link SpatialContext} based on configuration data. Call * {@link #makeSpatialContext(java.util.Map, ClassLoader)} to construct one via String name-value * pairs. To construct one via code then create a factory instance, set the fields, then call * {@link #newSpatialContext()}. 
* <p/> * The following keys are looked up in the args map: * <DL> * <DT>spatialContextFactory</DT> * <DD>com.spatial4j.core.context.SpatialContext or * com.spatial4j.core.context.jts.JtsSpatialContext</DD> * <DT>geo</DT> * <DD>true (default)| false -- see {@link SpatialContext#isGeo()} </DD> * <DT>distCalculator</DT> * <DD>haversine | lawOfCosines | vincentySphere | cartesian | cartesian^2 * -- see {@link DistanceCalculator}</DD> * <DT>worldBounds</DT> * <DD>{@code ENVELOPE(xMin, xMax, yMax, yMin)} -- see {@link SpatialContext#getWorldBounds()}</DD> * <DT>normWrapLongitude</DT> * <DD>true | false (default) -- see {@link SpatialContext#isNormWrapLongitude()}</DD> * </DL> */ public class SpatialContextFactory { /** Set by {@link #makeSpatialContext(java.util.Map, ClassLoader)}. */ protected Map<String, String> args; /** Set by {@link #makeSpatialContext(java.util.Map, ClassLoader)}. */ protected ClassLoader classLoader; /* These fields are public to make it easy to set them without bothering with setters. */ public boolean geo = true; public DistanceCalculator distCalc;//defaults in SpatialContext c'tor based on geo public Rectangle worldBounds;//defaults in SpatialContext c'tor based on geo public boolean normWrapLongitude = false; public Class<? extends WktShapeParser> wktShapeParserClass = WktShapeParser.class; public Class<? extends BinaryCodec> binaryCodecClass = BinaryCodec.class; /** * Creates a new {@link SpatialContext} based on configuration in * <code>args</code>. See the class definition for what keys are looked up * in it. * The factory class is looked up via "spatialContextFactory" in args * then falling back to a Java system property (with initial caps). If neither are specified * then {@link SpatialContextFactory} is chosen. * * @param args Non-null map of name-value pairs. * @param classLoader Optional, except when a class name is provided to an * argument. 
*/ public static SpatialContext makeSpatialContext(Map<String,String> args, ClassLoader classLoader) { if (classLoader == null) classLoader = SpatialContextFactory.class.getClassLoader(); SpatialContextFactory instance; String cname = args.get("spatialContextFactory"); if (cname == null) cname = System.getProperty("SpatialContextFactory"); if (cname == null) instance = new SpatialContextFactory(); else { try { Class c = classLoader.loadClass(cname); instance = (SpatialContextFactory) c.newInstance(); } catch (Exception e) { throw new RuntimeException(e); } } instance.init(args,classLoader); return instance.newSpatialContext(); } protected void init(Map<String, String> args, ClassLoader classLoader) { this.args = args; this.classLoader = classLoader; initField("geo"); initCalculator(); //init wktParser before worldBounds because WB needs to be parsed initField("wktShapeParserClass"); initWorldBounds(); initField("normWrapLongitude"); initField("binaryCodecClass"); } /** Gets {@code name} from args and populates a field by the same name with the value. */ protected void initField(String name) { // note: java.beans API is more verbose to use correctly (?) 
but would arguably be better Field field; try { field = getClass().getField(name); } catch (NoSuchFieldException e) { throw new Error(e); } String str = args.get(name); if (str != null) { try { Object o; if (field.getType() == Boolean.TYPE) { o = Boolean.valueOf(str); } else if (field.getType() == Class.class) { try { o = classLoader.loadClass(str); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } else if (field.getType().isEnum()) { o = Enum.valueOf(field.getType().asSubclass(Enum.class), str); } else { throw new Error("unsupported field type: "+field.getType());//not plausible at runtime unless developing } field.set(this, o); } catch (IllegalAccessException e) { throw new Error(e); } catch (Exception e) { throw new RuntimeException( "Invalid value '"+str+"' on field "+name+" of type "+field.getType(), e); } } } protected void initCalculator() { String calcStr = args.get("distCalculator"); if (calcStr == null) return; if (calcStr.equalsIgnoreCase("haversine")) { distCalc = new GeodesicSphereDistCalc.Haversine(); } else if (calcStr.equalsIgnoreCase("lawOfCosines")) { distCalc = new GeodesicSphereDistCalc.LawOfCosines(); } else if (calcStr.equalsIgnoreCase("vincentySphere")) { distCalc = new GeodesicSphereDistCalc.Vincenty(); } else if (calcStr.equalsIgnoreCase("cartesian")) { distCalc = new CartesianDistCalc(); } else if (calcStr.equalsIgnoreCase("cartesian^2")) { distCalc = new CartesianDistCalc(true); } else { throw new RuntimeException("Unknown calculator: "+calcStr); } } protected void initWorldBounds() { String worldBoundsStr = args.get("worldBounds"); if (worldBoundsStr == null) return; //kinda ugly we do this just to read a rectangle. TODO refactor final SpatialContext ctx = newSpatialContext(); worldBounds = (Rectangle) ctx.readShape(worldBoundsStr);//TODO use readShapeFromWkt } /** Subclasses should simply construct the instance from the initialized configuration. 
*/ public SpatialContext newSpatialContext() { return new SpatialContext(this); } public WktShapeParser makeWktShapeParser(SpatialContext ctx) { return makeClassInstance(wktShapeParserClass, ctx, this); } public BinaryCodec makeBinaryCodec(SpatialContext ctx) { return makeClassInstance(binaryCodecClass, ctx, this); } @SuppressWarnings("unchecked") private <T> T makeClassInstance(Class<? extends T> clazz, Object... ctorArgs) { try { //can't simply lookup constructor by arg type because might be subclass type ctorLoop: for (Constructor<?> ctor : clazz.getConstructors()) { Class[] parameterTypes = ctor.getParameterTypes(); if (parameterTypes.length != ctorArgs.length) continue; for (int i = 0; i < ctorArgs.length; i++) { Object ctorArg = ctorArgs[i]; if (!parameterTypes[i].isAssignableFrom(ctorArg.getClass())) continue ctorLoop; } return clazz.cast(ctor.newInstance(ctorArgs)); } } catch (Exception e) { throw new RuntimeException(e); } throw new RuntimeException(clazz + " needs a constructor that takes: " + Arrays.toString(ctorArgs)); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.fn.harness.state; import static org.apache.beam.runners.core.construction.ModelCoders.STATE_BACKED_ITERABLE_CODER_URN; import com.google.auto.service.AutoService; import java.io.DataOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.function.Supplier; import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateRequest; import org.apache.beam.runners.core.construction.CoderTranslation.TranslationContext; import org.apache.beam.runners.core.construction.CoderTranslator; import org.apache.beam.runners.core.construction.CoderTranslatorRegistrar; import org.apache.beam.sdk.coders.IterableLikeCoder; import org.apache.beam.sdk.fn.stream.DataStreams; import org.apache.beam.sdk.util.BufferedElementCountingOutputStream; import org.apache.beam.sdk.util.VarInt; import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap; import 
org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterators;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.io.ByteStreams;

/**
 * A {@link BeamFnStateClient state} backed iterable which allows for fetching elements over the
 * portability state API. See <a
 * href="https://s.apache.org/beam-fn-state-api-and-bundle-processing">remote references</a> for
 * additional details.
 *
 * <p>One must supply a {@link StateBackedIterableTranslationContext} when using {@link
 * CoderTranslator#fromComponents} to be able to create a {@link StateBackedIterable.Coder}.
 */
public class StateBackedIterable<T> implements Iterable<T> {

  // Client used to lazily fetch the remote remainder of the iterable on iteration.
  private final BeamFnStateClient beamFnStateClient;
  private final org.apache.beam.sdk.coders.Coder<T> elemCoder;
  // State request built from the instruction id and the runner-supplied continuation key;
  // identifies the remote suffix of this iterable.
  @VisibleForTesting final StateRequest request;
  // Elements that were already decoded locally before the stream terminator was reached.
  @VisibleForTesting final List<T> prefix;

  public StateBackedIterable(
      BeamFnStateClient beamFnStateClient,
      String instructionId,
      ByteString runnerKey,
      org.apache.beam.sdk.coders.Coder<T> elemCoder,
      List<T> prefix) {
    this.beamFnStateClient = beamFnStateClient;
    this.elemCoder = elemCoder;
    StateRequest.Builder requestBuilder = StateRequest.newBuilder();
    requestBuilder
        .setInstructionId(instructionId)
        .getStateKeyBuilder()
        .getRunnerBuilder()
        .setKey(runnerKey);
    this.request = requestBuilder.build();
    this.prefix = prefix;
  }

  @Override
  public Iterator<T> iterator() {
    // Serve the locally-buffered prefix first, then stream and decode the remote suffix
    // on demand via the state API.
    return Iterators.concat(
        prefix.iterator(),
        new DataStreams.DataStreamDecoder(
            elemCoder,
            DataStreams.inbound(
                StateFetchingIterators.readAllStartingFrom(beamFnStateClient, request))));
  }

  /**
   * Decodes an {@link Iterable} that might be backed by state. If the terminator at the end of the
   * value stream is {@code -1} then we return a {@link StateBackedIterable} otherwise we return an
   * {@link Iterable}.
   */
  public static class Coder<T> extends IterableLikeCoder<T, Iterable<T>> {

    private final BeamFnStateClient beamFnStateClient;
    // Supplier rather than a plain String: the current instruction id is read lazily at
    // decode time, not captured when the coder is constructed.
    private final Supplier<String> instructionId;

    public Coder(
        BeamFnStateClient beamFnStateClient,
        Supplier<String> instructionId,
        org.apache.beam.sdk.coders.Coder<T> elemCoder) {
      super(elemCoder, "StateBackedIterable");
      this.beamFnStateClient = beamFnStateClient;
      this.instructionId = instructionId;
    }

    // Fully in-band stream (no terminator): return the decoded elements as-is.
    @Override
    protected Iterable<T> decodeToIterable(List<T> decodedElements) {
      return decodedElements;
    }

    // Terminated stream: a -1 terminator is followed by 'len(token) token', which names the
    // remote suffix; wrap everything in a state-backed iterable.
    @Override
    protected Iterable<T> decodeToIterable(
        List<T> decodedElements, long terminatorValue, InputStream in) throws IOException {
      if (terminatorValue == -1L) {
        long tokenLength = VarInt.decodeLong(in);
        ByteString token = ByteString.readFrom(ByteStreams.limit(in, tokenLength));
        return new StateBackedIterable<>(
            beamFnStateClient, instructionId.get(), token, getElemCoder(), decodedElements);
      } else {
        // Any other terminator indicates a corrupt or incompatible stream.
        throw new IllegalStateException(
            String.format(
                "StateBackedIterable expected terminator of 0 or -1 but received %s.",
                terminatorValue));
      }
    }

    @Override
    public void encode(Iterable<T> iterable, OutputStream outStream) throws IOException {
      // Plain iterables use the standard IterableLikeCoder encoding.
      if (!(iterable instanceof StateBackedIterable)) {
        super.encode(iterable, outStream);
        return;
      }
      StateBackedIterable<T> stateBackedIterable = (StateBackedIterable<T>) iterable;

      DataOutputStream dataOutStream = new DataOutputStream(outStream);
      // We don't know the size without traversing it so use a fixed size buffer
      // and encode as many elements as possible into it before outputting the size followed
      // by the elements.
      dataOutStream.writeInt(-1);
      BufferedElementCountingOutputStream countingOutputStream =
          new BufferedElementCountingOutputStream(dataOutStream, -1L);
      // Encode only the prefix
      for (T elem : stateBackedIterable.prefix) {
        countingOutputStream.markElementStart();
        getElemCoder().encode(elem, countingOutputStream);
      }
      countingOutputStream.finish();
      // Make sure all our output gets pushed to the underlying outStream.
      dataOutStream.flush();

      // Append 'len(token) token' after the -1 stream terminator.
      VarInt.encode(
          stateBackedIterable.request.getStateKey().getRunner().getKey().size(), outStream);
      stateBackedIterable.request.getStateKey().getRunner().getKey().writeTo(outStream);
    }
  }

  /** Additional parameters required by the {@link StateBackedIterable.Coder}. */
  public interface StateBackedIterableTranslationContext extends TranslationContext {
    BeamFnStateClient getStateClient();

    Supplier<String> getCurrentInstructionId();
  }

  /** A {@link CoderTranslatorRegistrar} for {@code beam:coder:state_backed_iterable:v1}. */
  @AutoService(CoderTranslatorRegistrar.class)
  public static class Registrar implements CoderTranslatorRegistrar {

    @Override
    public Map<Class<? extends org.apache.beam.sdk.coders.Coder>, String> getCoderURNs() {
      return Collections.singletonMap(
          StateBackedIterable.Coder.class, STATE_BACKED_ITERABLE_CODER_URN);
    }

    @Override
    public Map<
            Class<? extends org.apache.beam.sdk.coders.Coder>,
            CoderTranslator<? extends org.apache.beam.sdk.coders.Coder>>
        getCoderTranslators() {
      return ImmutableMap.of(StateBackedIterable.Coder.class, new Translator());
    }
  }

  /**
   * A {@link CoderTranslator} for {@code beam:coder:state_backed_iterable:v1}.
   *
   * <p>One must supply a {@link StateBackedIterableTranslationContext} during {@link
   * CoderTranslator#fromComponents} to be able to successfully create an instance.
   */
  private static class Translator implements CoderTranslator<StateBackedIterable.Coder<?>> {

    // The only component of this coder is the element coder.
    @Override
    public List<? extends org.apache.beam.sdk.coders.Coder<?>> getComponents(
        StateBackedIterable.Coder<?> from) {
      return Collections.<org.apache.beam.sdk.coders.Coder<?>>singletonList(from.getElemCoder());
    }

    @Override
    public StateBackedIterable.Coder<?> fromComponents(
        List<org.apache.beam.sdk.coders.Coder<?>> components,
        byte[] payload,
        TranslationContext context) {
      if (context instanceof StateBackedIterableTranslationContext) {
        return new StateBackedIterable.Coder<>(
            ((StateBackedIterableTranslationContext) context).getStateClient(),
            ((StateBackedIterableTranslationContext) context).getCurrentInstructionId(),
            Iterables.getOnlyElement(components));
      } else {
        throw new IllegalStateException(
            String.format(
                "Unable to construct coder %s. Expected translation context %s but received %s.",
                STATE_BACKED_ITERABLE_CODER_URN,
                StateBackedIterableTranslationContext.class.getName(),
                context.getClass().getName()));
      }
    }
  }
}
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights * Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.redshift.model; import java.io.Serializable; /** * <p> * Describes the result of a cluster resize operation. * </p> */ public class DescribeResizeResult implements Serializable, Cloneable { /** * <p> * The node type that the cluster will have after the resize operation is * complete. * </p> */ private String targetNodeType; /** * <p> * The number of nodes that the cluster will have after the resize operation * is complete. * </p> */ private Integer targetNumberOfNodes; /** * <p> * The cluster type after the resize operation is complete. * </p> * <p> * Valid Values: <code>multi-node</code> | <code>single-node</code> * </p> */ private String targetClusterType; /** * <p> * The status of the resize operation. * </p> * <p> * Valid Values: <code>NONE</code> | <code>IN_PROGRESS</code> | * <code>FAILED</code> | <code>SUCCEEDED</code> * </p> */ private String status; /** * <p> * The names of tables that have been completely imported . * </p> * <p> * Valid Values: List of table names. * </p> */ private com.amazonaws.internal.SdkInternalList<String> importTablesCompleted; /** * <p> * The names of tables that are being currently imported. * </p> * <p> * Valid Values: List of table names. * </p> */ private com.amazonaws.internal.SdkInternalList<String> importTablesInProgress; /** * <p> * The names of tables that have not been yet imported. 
* </p> * <p> * Valid Values: List of table names * </p> */ private com.amazonaws.internal.SdkInternalList<String> importTablesNotStarted; /** * <p> * The average rate of the resize operation over the last few minutes, * measured in megabytes per second. After the resize operation completes, * this value shows the average rate of the entire resize operation. * </p> */ private Double avgResizeRateInMegaBytesPerSecond; /** * <p> * The estimated total amount of data, in megabytes, on the cluster before * the resize operation began. * </p> */ private Long totalResizeDataInMegaBytes; /** * <p> * While the resize operation is in progress, this value shows the current * amount of data, in megabytes, that has been processed so far. When the * resize operation is complete, this value shows the total amount of data, * in megabytes, on the cluster, which may be more or less than * TotalResizeDataInMegaBytes (the estimated total amount of data before * resize). * </p> */ private Long progressInMegaBytes; /** * <p> * The amount of seconds that have elapsed since the resize operation began. * After the resize operation completes, this value shows the total actual * time, in seconds, for the resize operation. * </p> */ private Long elapsedTimeInSeconds; /** * <p> * The estimated time remaining, in seconds, until the resize operation is * complete. This value is calculated based on the average resize rate and * the estimated amount of data remaining to be processed. Once the resize * operation is complete, this value will be 0. * </p> */ private Long estimatedTimeToCompletionInSeconds; /** * <p> * The node type that the cluster will have after the resize operation is * complete. * </p> * * @param targetNodeType * The node type that the cluster will have after the resize * operation is complete. 
*/ public void setTargetNodeType(String targetNodeType) { this.targetNodeType = targetNodeType; } /** * <p> * The node type that the cluster will have after the resize operation is * complete. * </p> * * @return The node type that the cluster will have after the resize * operation is complete. */ public String getTargetNodeType() { return this.targetNodeType; } /** * <p> * The node type that the cluster will have after the resize operation is * complete. * </p> * * @param targetNodeType * The node type that the cluster will have after the resize * operation is complete. * @return Returns a reference to this object so that method calls can be * chained together. */ public DescribeResizeResult withTargetNodeType(String targetNodeType) { setTargetNodeType(targetNodeType); return this; } /** * <p> * The number of nodes that the cluster will have after the resize operation * is complete. * </p> * * @param targetNumberOfNodes * The number of nodes that the cluster will have after the resize * operation is complete. */ public void setTargetNumberOfNodes(Integer targetNumberOfNodes) { this.targetNumberOfNodes = targetNumberOfNodes; } /** * <p> * The number of nodes that the cluster will have after the resize operation * is complete. * </p> * * @return The number of nodes that the cluster will have after the resize * operation is complete. */ public Integer getTargetNumberOfNodes() { return this.targetNumberOfNodes; } /** * <p> * The number of nodes that the cluster will have after the resize operation * is complete. * </p> * * @param targetNumberOfNodes * The number of nodes that the cluster will have after the resize * operation is complete. * @return Returns a reference to this object so that method calls can be * chained together. */ public DescribeResizeResult withTargetNumberOfNodes( Integer targetNumberOfNodes) { setTargetNumberOfNodes(targetNumberOfNodes); return this; } /** * <p> * The cluster type after the resize operation is complete. 
* </p> * <p> * Valid Values: <code>multi-node</code> | <code>single-node</code> * </p> * * @param targetClusterType * The cluster type after the resize operation is complete.</p> * <p> * Valid Values: <code>multi-node</code> | <code>single-node</code> */ public void setTargetClusterType(String targetClusterType) { this.targetClusterType = targetClusterType; } /** * <p> * The cluster type after the resize operation is complete. * </p> * <p> * Valid Values: <code>multi-node</code> | <code>single-node</code> * </p> * * @return The cluster type after the resize operation is complete.</p> * <p> * Valid Values: <code>multi-node</code> | <code>single-node</code> */ public String getTargetClusterType() { return this.targetClusterType; } /** * <p> * The cluster type after the resize operation is complete. * </p> * <p> * Valid Values: <code>multi-node</code> | <code>single-node</code> * </p> * * @param targetClusterType * The cluster type after the resize operation is complete.</p> * <p> * Valid Values: <code>multi-node</code> | <code>single-node</code> * @return Returns a reference to this object so that method calls can be * chained together. */ public DescribeResizeResult withTargetClusterType(String targetClusterType) { setTargetClusterType(targetClusterType); return this; } /** * <p> * The status of the resize operation. * </p> * <p> * Valid Values: <code>NONE</code> | <code>IN_PROGRESS</code> | * <code>FAILED</code> | <code>SUCCEEDED</code> * </p> * * @param status * The status of the resize operation.</p> * <p> * Valid Values: <code>NONE</code> | <code>IN_PROGRESS</code> | * <code>FAILED</code> | <code>SUCCEEDED</code> */ public void setStatus(String status) { this.status = status; } /** * <p> * The status of the resize operation. 
* </p> * <p> * Valid Values: <code>NONE</code> | <code>IN_PROGRESS</code> | * <code>FAILED</code> | <code>SUCCEEDED</code> * </p> * * @return The status of the resize operation.</p> * <p> * Valid Values: <code>NONE</code> | <code>IN_PROGRESS</code> | * <code>FAILED</code> | <code>SUCCEEDED</code> */ public String getStatus() { return this.status; } /** * <p> * The status of the resize operation. * </p> * <p> * Valid Values: <code>NONE</code> | <code>IN_PROGRESS</code> | * <code>FAILED</code> | <code>SUCCEEDED</code> * </p> * * @param status * The status of the resize operation.</p> * <p> * Valid Values: <code>NONE</code> | <code>IN_PROGRESS</code> | * <code>FAILED</code> | <code>SUCCEEDED</code> * @return Returns a reference to this object so that method calls can be * chained together. */ public DescribeResizeResult withStatus(String status) { setStatus(status); return this; } /** * <p> * The names of tables that have been completely imported . * </p> * <p> * Valid Values: List of table names. * </p> * * @return The names of tables that have been completely imported .</p> * <p> * Valid Values: List of table names. */ public java.util.List<String> getImportTablesCompleted() { if (importTablesCompleted == null) { importTablesCompleted = new com.amazonaws.internal.SdkInternalList<String>(); } return importTablesCompleted; } /** * <p> * The names of tables that have been completely imported . * </p> * <p> * Valid Values: List of table names. * </p> * * @param importTablesCompleted * The names of tables that have been completely imported .</p> * <p> * Valid Values: List of table names. */ public void setImportTablesCompleted( java.util.Collection<String> importTablesCompleted) { if (importTablesCompleted == null) { this.importTablesCompleted = null; return; } this.importTablesCompleted = new com.amazonaws.internal.SdkInternalList<String>( importTablesCompleted); } /** * <p> * The names of tables that have been completely imported . 
* </p> * <p> * Valid Values: List of table names. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setImportTablesCompleted(java.util.Collection)} or * {@link #withImportTablesCompleted(java.util.Collection)} if you want to * override the existing values. * </p> * * @param importTablesCompleted * The names of tables that have been completely imported .</p> * <p> * Valid Values: List of table names. * @return Returns a reference to this object so that method calls can be * chained together. */ public DescribeResizeResult withImportTablesCompleted( String... importTablesCompleted) { if (this.importTablesCompleted == null) { setImportTablesCompleted(new com.amazonaws.internal.SdkInternalList<String>( importTablesCompleted.length)); } for (String ele : importTablesCompleted) { this.importTablesCompleted.add(ele); } return this; } /** * <p> * The names of tables that have been completely imported . * </p> * <p> * Valid Values: List of table names. * </p> * * @param importTablesCompleted * The names of tables that have been completely imported .</p> * <p> * Valid Values: List of table names. * @return Returns a reference to this object so that method calls can be * chained together. */ public DescribeResizeResult withImportTablesCompleted( java.util.Collection<String> importTablesCompleted) { setImportTablesCompleted(importTablesCompleted); return this; } /** * <p> * The names of tables that are being currently imported. * </p> * <p> * Valid Values: List of table names. * </p> * * @return The names of tables that are being currently imported.</p> * <p> * Valid Values: List of table names. */ public java.util.List<String> getImportTablesInProgress() { if (importTablesInProgress == null) { importTablesInProgress = new com.amazonaws.internal.SdkInternalList<String>(); } return importTablesInProgress; } /** * <p> * The names of tables that are being currently imported. * </p> * <p> * Valid Values: List of table names. 
* </p> * * @param importTablesInProgress * The names of tables that are being currently imported.</p> * <p> * Valid Values: List of table names. */ public void setImportTablesInProgress( java.util.Collection<String> importTablesInProgress) { if (importTablesInProgress == null) { this.importTablesInProgress = null; return; } this.importTablesInProgress = new com.amazonaws.internal.SdkInternalList<String>( importTablesInProgress); } /** * <p> * The names of tables that are being currently imported. * </p> * <p> * Valid Values: List of table names. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setImportTablesInProgress(java.util.Collection)} or * {@link #withImportTablesInProgress(java.util.Collection)} if you want to * override the existing values. * </p> * * @param importTablesInProgress * The names of tables that are being currently imported.</p> * <p> * Valid Values: List of table names. * @return Returns a reference to this object so that method calls can be * chained together. */ public DescribeResizeResult withImportTablesInProgress( String... importTablesInProgress) { if (this.importTablesInProgress == null) { setImportTablesInProgress(new com.amazonaws.internal.SdkInternalList<String>( importTablesInProgress.length)); } for (String ele : importTablesInProgress) { this.importTablesInProgress.add(ele); } return this; } /** * <p> * The names of tables that are being currently imported. * </p> * <p> * Valid Values: List of table names. * </p> * * @param importTablesInProgress * The names of tables that are being currently imported.</p> * <p> * Valid Values: List of table names. * @return Returns a reference to this object so that method calls can be * chained together. 
*/
public DescribeResizeResult withImportTablesInProgress(
        java.util.Collection<String> importTablesInProgress) {
    setImportTablesInProgress(importTablesInProgress);
    return this;
}

/**
 * Returns the names of tables that have not yet been imported.
 * <p>
 * Valid Values: List of table names
 *
 * @return the table names; never null (an empty internal list is lazily
 *         created on first access so callers can append directly)
 */
public java.util.List<String> getImportTablesNotStarted() {
    if (importTablesNotStarted == null) {
        // Lazy init: lets callers mutate the returned list without a prior set.
        importTablesNotStarted = new com.amazonaws.internal.SdkInternalList<String>();
    }
    return importTablesNotStarted;
}

/**
 * Sets the names of tables that have not yet been imported.
 * <p>
 * Valid Values: List of table names
 *
 * @param importTablesNotStarted
 *        the table names, or null to clear the list; non-null input is
 *        defensively copied into an internal list
 */
public void setImportTablesNotStarted(
        java.util.Collection<String> importTablesNotStarted) {
    if (importTablesNotStarted == null) {
        this.importTablesNotStarted = null;
        return;
    }
    this.importTablesNotStarted = new com.amazonaws.internal.SdkInternalList<String>(
            importTablesNotStarted);
}

/**
 * Appends table names to the list of tables that have not yet been imported.
 * <p>
 * <b>NOTE:</b> This method appends to the existing list (if any). Use
 * {@link #setImportTablesNotStarted(java.util.Collection)} or
 * {@link #withImportTablesNotStarted(java.util.Collection)} to replace the
 * existing values instead.
 *
 * @param importTablesNotStarted
 *        the table names to append
 * @return this object, so that method calls can be chained together
 */
public DescribeResizeResult withImportTablesNotStarted(
        String... importTablesNotStarted) {
    if (this.importTablesNotStarted == null) {
        // Pre-size the backing list to the number of elements being appended.
        setImportTablesNotStarted(new com.amazonaws.internal.SdkInternalList<String>(
                importTablesNotStarted.length));
    }
    for (String ele : importTablesNotStarted) {
        this.importTablesNotStarted.add(ele);
    }
    return this;
}

/**
 * Replaces the list of tables that have not yet been imported.
 *
 * @param importTablesNotStarted
 *        the table names, or null to clear the list
 * @return this object, so that method calls can be chained together
 */
public DescribeResizeResult withImportTablesNotStarted(
        java.util.Collection<String> importTablesNotStarted) {
    setImportTablesNotStarted(importTablesNotStarted);
    return this;
}

/**
 * Sets the average rate of the resize operation over the last few minutes,
 * measured in megabytes per second. After the resize operation completes,
 * this value shows the average rate of the entire resize operation.
 *
 * @param avgResizeRateInMegaBytesPerSecond the average resize rate, in MB/s
 */
public void setAvgResizeRateInMegaBytesPerSecond(
        Double avgResizeRateInMegaBytesPerSecond) {
    this.avgResizeRateInMegaBytesPerSecond = avgResizeRateInMegaBytesPerSecond;
}

/**
 * Returns the average rate of the resize operation over the last few
 * minutes, measured in megabytes per second. After the resize operation
 * completes, this value shows the average rate of the entire resize
 * operation.
 *
 * @return the average resize rate, in MB/s
 */
public Double getAvgResizeRateInMegaBytesPerSecond() {
    return this.avgResizeRateInMegaBytesPerSecond;
}

/**
 * Fluent variant of {@link #setAvgResizeRateInMegaBytesPerSecond(Double)}.
 *
 * @param avgResizeRateInMegaBytesPerSecond the average resize rate, in MB/s
 * @return this object, so that method calls can be chained together
 */
public DescribeResizeResult withAvgResizeRateInMegaBytesPerSecond(
        Double avgResizeRateInMegaBytesPerSecond) {
    setAvgResizeRateInMegaBytesPerSecond(avgResizeRateInMegaBytesPerSecond);
    return this;
}

/**
 * Sets the estimated total amount of data, in megabytes, on the cluster
 * before the resize operation began.
 *
 * @param totalResizeDataInMegaBytes the estimated total data size, in MB
 */
public void setTotalResizeDataInMegaBytes(Long totalResizeDataInMegaBytes) {
    this.totalResizeDataInMegaBytes = totalResizeDataInMegaBytes;
}

/**
 * Returns the estimated total amount of data, in megabytes, on the cluster
 * before the resize operation began.
 *
 * @return the estimated total data size, in MB
 */
public Long getTotalResizeDataInMegaBytes() {
    return this.totalResizeDataInMegaBytes;
}

/**
 * Fluent variant of {@link #setTotalResizeDataInMegaBytes(Long)}.
 *
 * @param totalResizeDataInMegaBytes the estimated total data size, in MB
 * @return this object, so that method calls can be chained together
 */
public DescribeResizeResult withTotalResizeDataInMegaBytes(
        Long totalResizeDataInMegaBytes) {
    setTotalResizeDataInMegaBytes(totalResizeDataInMegaBytes);
    return this;
}

/**
 * Sets the amount of data processed so far, in megabytes. While the resize
 * is in progress this grows toward completion; once complete it is the
 * total amount of data on the cluster, which may differ from
 * TotalResizeDataInMegaBytes (the pre-resize estimate).
 *
 * @param progressInMegaBytes the processed data size, in MB
 */
public void setProgressInMegaBytes(Long progressInMegaBytes) {
    this.progressInMegaBytes = progressInMegaBytes;
}

/**
 * Returns the amount of data processed so far, in megabytes. Once the
 * resize completes, this is the total amount of data on the cluster, which
 * may differ from TotalResizeDataInMegaBytes (the pre-resize estimate).
 *
 * @return the processed data size, in MB
 */
public Long getProgressInMegaBytes() {
    return this.progressInMegaBytes;
}

/**
 * Fluent variant of {@link #setProgressInMegaBytes(Long)}.
 *
 * @param progressInMegaBytes the processed data size, in MB
 * @return this object, so that method calls can be chained together
 */
public DescribeResizeResult withProgressInMegaBytes(Long progressInMegaBytes) {
    setProgressInMegaBytes(progressInMegaBytes);
    return this;
}

/**
 * Sets the number of seconds that have elapsed since the resize operation
 * began. After the resize completes, this is the total actual time, in
 * seconds, for the resize operation.
 *
 * @param elapsedTimeInSeconds the elapsed time, in seconds
 */
public void setElapsedTimeInSeconds(Long elapsedTimeInSeconds) {
    this.elapsedTimeInSeconds = elapsedTimeInSeconds;
}

/**
 * Returns the number of seconds that have elapsed since the resize
 * operation began. After the resize completes, this is the total actual
 * time, in seconds, for the resize operation.
 *
 * @return the elapsed time, in seconds
 */
public Long getElapsedTimeInSeconds() {
    return this.elapsedTimeInSeconds;
}

/**
 * Fluent variant of {@link #setElapsedTimeInSeconds(Long)}.
 *
 * @param elapsedTimeInSeconds the elapsed time, in seconds
 * @return this object, so that method calls can be chained together
 */
public DescribeResizeResult withElapsedTimeInSeconds(
        Long elapsedTimeInSeconds) {
    setElapsedTimeInSeconds(elapsedTimeInSeconds);
    return this;
}

/**
 * Sets the estimated time remaining, in seconds, until the resize
 * operation is complete. Calculated from the average resize rate and the
 * estimated amount of data remaining; 0 once the resize is complete.
 *
 * @param estimatedTimeToCompletionInSeconds the estimated remaining time,
 *        in seconds
 */
public void setEstimatedTimeToCompletionInSeconds(
        Long estimatedTimeToCompletionInSeconds) {
    this.estimatedTimeToCompletionInSeconds = estimatedTimeToCompletionInSeconds;
}

/**
 * Returns the estimated time remaining, in seconds, until the resize
 * operation is complete. Calculated from the average resize rate and the
 * estimated amount of data remaining; 0 once the resize is complete.
 *
 * @return the estimated remaining time, in seconds
 */
public Long getEstimatedTimeToCompletionInSeconds() {
    return this.estimatedTimeToCompletionInSeconds;
}

/**
 * Fluent variant of {@link #setEstimatedTimeToCompletionInSeconds(Long)}.
 *
 * @param estimatedTimeToCompletionInSeconds the estimated remaining time,
 *        in seconds
 * @return this object, so that method calls can be chained together
 */
public DescribeResizeResult withEstimatedTimeToCompletionInSeconds(
        Long estimatedTimeToCompletionInSeconds) {
    setEstimatedTimeToCompletionInSeconds(estimatedTimeToCompletionInSeconds);
    return this;
}

/**
 * Returns a string representation of this object; useful for testing and
 * debugging. Only non-null fields are included, each suffixed with "," as
 * in the original generated format.
 *
 * @return a string representation of this object
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("{");
    if (getTargetNodeType() != null)
        sb.append("TargetNodeType: " + getTargetNodeType() + ",");
    if (getTargetNumberOfNodes() != null)
        sb.append("TargetNumberOfNodes: " + getTargetNumberOfNodes() + ",");
    if (getTargetClusterType() != null)
        sb.append("TargetClusterType: " + getTargetClusterType() + ",");
    if (getStatus() != null)
        sb.append("Status: " + getStatus() + ",");
    if (getImportTablesCompleted() != null)
        sb.append("ImportTablesCompleted: " + getImportTablesCompleted() + ",");
    if (getImportTablesInProgress() != null)
        sb.append("ImportTablesInProgress: " + getImportTablesInProgress() + ",");
    if (getImportTablesNotStarted() != null)
        sb.append("ImportTablesNotStarted: " + getImportTablesNotStarted() + ",");
    if (getAvgResizeRateInMegaBytesPerSecond() != null)
        sb.append("AvgResizeRateInMegaBytesPerSecond: "
                + getAvgResizeRateInMegaBytesPerSecond() + ",");
    if (getTotalResizeDataInMegaBytes() != null)
        sb.append("TotalResizeDataInMegaBytes: "
                + getTotalResizeDataInMegaBytes() + ",");
    if (getProgressInMegaBytes() != null)
        sb.append("ProgressInMegaBytes: " + getProgressInMegaBytes() + ",");
    if (getElapsedTimeInSeconds() != null)
        sb.append("ElapsedTimeInSeconds: " + getElapsedTimeInSeconds() + ",");
    if (getEstimatedTimeToCompletionInSeconds() != null)
        sb.append("EstimatedTimeToCompletionInSeconds: "
                + getEstimatedTimeToCompletionInSeconds());
    sb.append("}");
    return sb.toString();
}

/**
 * Null-safe equality helper. Equivalent to the generated
 * {@code (a == null ^ b == null) -> false; a != null && !a.equals(b) -> false}
 * pattern: both-null compares equal, one-null compares unequal, otherwise
 * delegates to {@code a.equals(b)}.
 */
private static boolean eq(Object a, Object b) {
    return a == null ? b == null : a.equals(b);
}

/** Null-safe hash helper matching the generated {@code x == null ? 0 : x.hashCode()} idiom. */
private static int hc(Object o) {
    return o == null ? 0 : o.hashCode();
}

@Override
public boolean equals(Object obj) {
    if (this == obj)
        return true;
    // instanceof is false for null, so no separate null check is needed.
    if (!(obj instanceof DescribeResizeResult))
        return false;
    DescribeResizeResult other = (DescribeResizeResult) obj;
    return eq(getTargetNodeType(), other.getTargetNodeType())
            && eq(getTargetNumberOfNodes(), other.getTargetNumberOfNodes())
            && eq(getTargetClusterType(), other.getTargetClusterType())
            && eq(getStatus(), other.getStatus())
            && eq(getImportTablesCompleted(), other.getImportTablesCompleted())
            && eq(getImportTablesInProgress(), other.getImportTablesInProgress())
            && eq(getImportTablesNotStarted(), other.getImportTablesNotStarted())
            && eq(getAvgResizeRateInMegaBytesPerSecond(),
                    other.getAvgResizeRateInMegaBytesPerSecond())
            && eq(getTotalResizeDataInMegaBytes(),
                    other.getTotalResizeDataInMegaBytes())
            && eq(getProgressInMegaBytes(), other.getProgressInMegaBytes())
            && eq(getElapsedTimeInSeconds(), other.getElapsedTimeInSeconds())
            && eq(getEstimatedTimeToCompletionInSeconds(),
                    other.getEstimatedTimeToCompletionInSeconds());
}

@Override
public int hashCode() {
    // Same 31-based accumulation and field order as the generated code, so
    // hash values are unchanged.
    final int prime = 31;
    int hashCode = 1;
    hashCode = prime * hashCode + hc(getTargetNodeType());
    hashCode = prime * hashCode + hc(getTargetNumberOfNodes());
    hashCode = prime * hashCode + hc(getTargetClusterType());
    hashCode = prime * hashCode + hc(getStatus());
    hashCode = prime * hashCode + hc(getImportTablesCompleted());
    hashCode = prime * hashCode + hc(getImportTablesInProgress());
    hashCode = prime * hashCode + hc(getImportTablesNotStarted());
    hashCode = prime * hashCode + hc(getAvgResizeRateInMegaBytesPerSecond());
    hashCode = prime * hashCode + hc(getTotalResizeDataInMegaBytes());
    hashCode = prime * hashCode + hc(getProgressInMegaBytes());
    hashCode = prime * hashCode + hc(getElapsedTimeInSeconds());
    hashCode = prime * hashCode + hc(getEstimatedTimeToCompletionInSeconds());
    return hashCode;
}

/**
 * Creates a shallow copy of this result object.
 *
 * @return a clone of this instance
 * @throws IllegalStateException if {@code Object.clone()} unexpectedly
 *         reports that this Cloneable class does not support cloning
 */
@Override
public DescribeResizeResult clone() {
    try {
        return (DescribeResizeResult) super.clone();
    } catch (CloneNotSupportedException e) {
        throw new IllegalStateException(
                "Got a CloneNotSupportedException from Object.clone() "
                        + "even though we're Cloneable!", e);
    }
}
}
///////////////////////////////////////////////////////////////////////////////
// Copyright (c) 2002, Eric D. Friedman All Rights Reserved.
// Copyright (c) 2009, Robert D. Eden All Rights Reserved.
// Copyright (c) 2009, Jeff Randall All Rights Reserved.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
///////////////////////////////////////////////////////////////////////////////

package gnu.trove.decorator;

import gnu.trove.map.TByteFloatMap;
import gnu.trove.iterator.TByteFloatIterator;

import java.io.*;
import java.util.*;


//////////////////////////////////////////////////
// THIS IS A GENERATED CLASS. DO NOT HAND EDIT! //
//////////////////////////////////////////////////


/**
 * Wrapper class to make a TByteFloatMap conform to the <tt>java.util.Map</tt> API.
 * This class simply decorates an underlying TByteFloatMap and translates the Object-based
 * APIs into their Trove primitive analogs.
 * <p/>
 * Note that wrapping and unwrapping primitive values is extremely inefficient. If
 * possible, users of this class should override the appropriate methods in this class
 * and use a table of canonical values.
 * <p/>
 * The primitive map's "no entry" sentinel key/value is surfaced as {@code null}
 * on the Object side; consequently a mapping whose value equals the sentinel is
 * indistinguishable from an absent mapping in {@link #get} and {@link #remove}.
 * <p/>
 * Created: Mon Sep 23 22:07:40 PDT 2002
 *
 * @author Eric D. Friedman
 * @author Robert D. Eden
 * @author Jeff Randall
 */
public class TByteFloatMapDecorator extends AbstractMap<Byte, Float>
    implements Map<Byte, Float>, Externalizable, Cloneable {

    static final long serialVersionUID = 1L;

    /** the wrapped primitive map */
    protected TByteFloatMap _map;


    /**
     * FOR EXTERNALIZATION ONLY!!
     */
    public TByteFloatMapDecorator() {}


    /**
     * Creates a wrapper that decorates the specified primitive map.
     *
     * @param map the <tt>TByteFloatMap</tt> to wrap.
     */
    public TByteFloatMapDecorator( TByteFloatMap map ) {
        super();
        this._map = map;
    }


    /**
     * Returns a reference to the map wrapped by this decorator.
     *
     * @return the wrapped <tt>TByteFloatMap</tt> instance.
     */
    public TByteFloatMap getMap() {
        return _map;
    }


    /**
     * Inserts a key/value pair into the map. A null key or value is stored
     * as the primitive map's "no entry" sentinel.
     *
     * @param key an <code>Object</code> value
     * @param value an <code>Object</code> value
     * @return the previous value associated with <tt>key</tt>,
     *         or null if none was found (or if the previous value equaled
     *         the no-entry sentinel — a known, inherent ambiguity).
     */
    public Float put( Byte key, Float value ) {
        byte k;
        float v;
        if ( key == null ) {
            k = _map.getNoEntryKey();
        } else {
            k = unwrapKey( key );
        }
        if ( value == null ) {
            v = _map.getNoEntryValue();
        } else {
            v = unwrapValue( value );
        }
        float retval = _map.put( k, v );
        if ( retval == _map.getNoEntryValue() ) {
            return null;
        }
        return wrapValue( retval );
    }


    /**
     * Retrieves the value for <tt>key</tt>.
     *
     * @param key an <code>Object</code> value
     * @return the value of <tt>key</tt> or null if no such mapping exists.
     */
    public Float get( Object key ) {
        byte k;
        if ( key != null ) {
            if ( key instanceof Byte ) {
                k = unwrapKey( key );
            } else {
                return null;
            }
        } else {
            k = _map.getNoEntryKey();
        }
        float v = _map.get( k );
        // There may be a false positive since primitive maps
        // cannot return null, so we have to do an extra
        // check here.
        if ( v == _map.getNoEntryValue() ) {
            return null;
        } else {
            return wrapValue( v );
        }
    }


    /**
     * Empties the map.
     */
    public void clear() {
        this._map.clear();
    }


    /**
     * Deletes a key/value pair from the map.
     *
     * @param key an <code>Object</code> value
     * @return the removed value, or null if it was not found in the map
     */
    public Float remove( Object key ) {
        byte k;
        if ( key != null ) {
            if ( key instanceof Byte ) {
                k = unwrapKey( key );
            } else {
                return null;
            }
        } else {
            k = _map.getNoEntryKey();
        }
        float v = _map.remove( k );
        // There may be a false positive since primitive maps
        // cannot return null, so we have to do an extra
        // check here.
        if ( v == _map.getNoEntryValue() ) {
            return null;
        } else {
            return wrapValue( v );
        }
    }


    /**
     * Returns a Set view on the entries of the map. The view's iterator
     * surfaces sentinel keys/values as null entries.
     *
     * @return a <code>Set</code> value
     */
    public Set<Map.Entry<Byte,Float>> entrySet() {
        return new AbstractSet<Map.Entry<Byte,Float>>() {

            public int size() {
                return _map.size();
            }

            public boolean isEmpty() {
                return TByteFloatMapDecorator.this.isEmpty();
            }

            public boolean contains( Object o ) {
                if ( o instanceof Map.Entry ) {
                    Object k = ( ( Map.Entry ) o ).getKey();
                    Object v = ( ( Map.Entry ) o ).getValue();
                    if ( !TByteFloatMapDecorator.this.containsKey( k ) ) {
                        return false;
                    }
                    // FIX: get(k) legitimately returns null when the stored
                    // value equals the no-entry sentinel; the original called
                    // get(k).equals(v) and threw NullPointerException here.
                    Object mapped = TByteFloatMapDecorator.this.get( k );
                    return mapped == null ? v == null : mapped.equals( v );
                } else {
                    return false;
                }
            }

            public Iterator<Map.Entry<Byte,Float>> iterator() {
                return new Iterator<Map.Entry<Byte,Float>>() {
                    private final TByteFloatIterator it = _map.iterator();

                    public Map.Entry<Byte,Float> next() {
                        it.advance();
                        byte ik = it.key();
                        // Sentinel key/value is surfaced as null on the Object side.
                        final Byte key = (ik == _map.getNoEntryKey()) ? null : wrapKey( ik );
                        float iv = it.value();
                        final Float v = (iv == _map.getNoEntryValue()) ? null : wrapValue( iv );
                        return new Map.Entry<Byte,Float>() {
                            private Float val = v;

                            /**
                             * Null-safe entry equality. FIX: the original
                             * called getKey().equals(key) on the comparand and
                             * threw NullPointerException for entries with a
                             * null key or value (which this iterator can
                             * itself produce for sentinel keys/values).
                             */
                            public boolean equals( Object o ) {
                                if ( !( o instanceof Map.Entry ) ) {
                                    return false;
                                }
                                Map.Entry that = ( Map.Entry ) o;
                                Object tk = that.getKey();
                                Object tv = that.getValue();
                                return ( tk == null ? key == null : tk.equals( key ) )
                                    && ( tv == null ? val == null : tv.equals( val ) );
                            }

                            public Byte getKey() {
                                return key;
                            }

                            public Float getValue() {
                                return val;
                            }

                            /**
                             * FIX: conform to the Map.Entry contract —
                             * (key==null ? 0 : key.hashCode()) XOR
                             * (value==null ? 0 : value.hashCode()). The
                             * original used key.hashCode() + val.hashCode(),
                             * which both violates the contract and threw
                             * NullPointerException for sentinel (null)
                             * keys/values.
                             */
                            public int hashCode() {
                                return ( key == null ? 0 : key.hashCode() )
                                     ^ ( val == null ? 0 : val.hashCode() );
                            }

                            public Float setValue( Float value ) {
                                val = value;
                                return put( key, value );
                            }
                        };
                    }

                    public boolean hasNext() {
                        return it.hasNext();
                    }

                    public void remove() {
                        it.remove();
                    }
                };
            }

            public boolean add( Map.Entry<Byte,Float> o ) {
                throw new UnsupportedOperationException();
            }

            public boolean remove( Object o ) {
                boolean modified = false;
                if ( contains( o ) ) {
                    //noinspection unchecked
                    Byte key = ( ( Map.Entry<Byte,Float> ) o ).getKey();
                    _map.remove( unwrapKey( key ) );
                    modified = true;
                }
                return modified;
            }

            public boolean addAll( Collection<? extends Map.Entry<Byte, Float>> c ) {
                throw new UnsupportedOperationException();
            }

            public void clear() {
                TByteFloatMapDecorator.this.clear();
            }
        };
    }


    /**
     * Checks for the presence of <tt>val</tt> in the values of the map.
     *
     * @param val an <code>Object</code> value
     * @return a <code>boolean</code> value
     */
    public boolean containsValue( Object val ) {
        return val instanceof Float && _map.containsValue( unwrapValue( val ) );
    }


    /**
     * Checks for the presence of <tt>key</tt> in the keys of the map.
     * A null key maps to the primitive "no entry" sentinel key.
     *
     * @param key an <code>Object</code> value
     * @return a <code>boolean</code> value
     */
    public boolean containsKey( Object key ) {
        if ( key == null ) return _map.containsKey( _map.getNoEntryKey() );
        return key instanceof Byte && _map.containsKey( unwrapKey( key ) );
    }


    /**
     * Returns the number of entries in the map.
     *
     * @return the map's size.
     */
    public int size() {
        return this._map.size();
    }


    /**
     * Indicates whether map has any entries.
     *
     * @return true if the map is empty
     */
    public boolean isEmpty() {
        return size() == 0;
    }


    /**
     * Copies the key/value mappings in <tt>map</tt> into this map.
     * Note that this will be a <b>deep</b> copy, as storage is by
     * primitive value.
     *
     * @param map a <code>Map</code> value
     */
    public void putAll( Map<? extends Byte, ? extends Float> map ) {
        Iterator<? extends Entry<? extends Byte,? extends Float>> it = map.entrySet().iterator();
        for ( int i = map.size(); i-- > 0; ) {
            Entry<? extends Byte,? extends Float> e = it.next();
            this.put( e.getKey(), e.getValue() );
        }
    }


    /**
     * Wraps a key
     *
     * @param k key in the underlying map
     * @return an Object representation of the key
     */
    protected Byte wrapKey( byte k ) {
        return Byte.valueOf( k );
    }


    /**
     * Unwraps a key
     *
     * @param key wrapped key
     * @return an unwrapped representation of the key
     */
    protected byte unwrapKey( Object key ) {
        return ( ( Byte ) key ).byteValue();
    }


    /**
     * Wraps a value
     *
     * @param k value in the underlying map
     * @return an Object representation of the value
     */
    protected Float wrapValue( float k ) {
        return Float.valueOf( k );
    }


    /**
     * Unwraps a value
     *
     * @param value wrapped value
     * @return an unwrapped representation of the value
     */
    protected float unwrapValue( Object value ) {
        return ( ( Float ) value ).floatValue();
    }


    // Implements Externalizable
    public void readExternal( ObjectInput in )
        throws IOException, ClassNotFoundException {

        // VERSION
        in.readByte();

        // MAP
        _map = ( TByteFloatMap ) in.readObject();
    }


    // Implements Externalizable
    public void writeExternal( ObjectOutput out ) throws IOException {
        // VERSION
        out.writeByte( 0 );

        // MAP
        out.writeObject( _map );
    }

} // TByteFloatHashMapDecorator
package robotsimulator.engine;

import java.util.ArrayList;

import robotsimulator.math.Matrix;
import robotsimulator.math.MatrixUtil;
import robotsimulator.math.Vector;
import se.krka.kahlua.vm.JavaFunction;
import se.krka.kahlua.vm.LuaCallFrame;
import se.krka.kahlua.vm.LuaState;
import se.krka.kahlua.vm.LuaTable;
import se.krka.kahlua.vm.LuaTableImpl;

/**
 * Lua-bindings for the {@link Matrix} class.
 * <p>
 * Registers a {@code Matrix(table)} constructor function and a {@code matrix}
 * factory table (identity/rotation/translation/scaling) in the Lua
 * environment, plus a metatable that routes {@code __add}/{@code __sub} and
 * indexing on Matrix userdata back into Java.
 *
 * @author Stian Sandviknes
 */
public class MatrixBinding {
    // Static-only binding holder; never instantiated.
    private MatrixBinding(){}

    /**
     * Lua function Matrix(t): builds a Matrix from a table of row-tables of
     * numbers, then transposes it before pushing the result. Returns nothing
     * (0 results) if the argument is not a table.
     */
    private static final JavaFunction createMatrix = new JavaFunction() {
        // Converts one row sub-table to a double[]; non-Double elements are
        // silently skipped, and non-table input yields an empty row.
        private double[] decodeSubTable(Object subElement) {
            if (subElement instanceof LuaTable) {
                LuaTable table = (LuaTable) subElement;
                ArrayList<Double> dList = new ArrayList<Double>();
                // Lua tables are conventionally 1-based; reads 1..len().
                for (int i = 1; i <= table.len(); i++) {
                    if (table.rawget(i) instanceof Double) {
                        dList.add((Double) table.rawget(i));
                    }
                }
                double[] numberList = new double[dList.size()];
                for (int i = 0; i < numberList.length; i++) {
                    numberList[i] = dList.get(i);
                }
                return numberList;
            } else {
                return new double[0];
            }
        }

        // Converts a table-of-tables to a (possibly ragged) double[][].
        private double[][] decodeTable(LuaTable table) {
            if (table.len() == 0) {
                return new double[0][0];
            } else {
                double[][] retval = new double[table.len()][];
                for (int i = 1; i <= table.len(); i++) {
                    retval[i - 1] = decodeSubTable(table.rawget(i));
                }
                return retval;
            }
        }

        @Override
        public int call(LuaCallFrame lcf, int i) {
            Object root = lcf.get(0);
            if (root instanceof LuaTable) {
                Matrix m = new Matrix(decodeTable((LuaTable) lcf.get(0)));
                // Transposed so the Lua table-of-rows maps onto the Matrix
                // constructor's expected orientation — TODO confirm against
                // the Matrix(double[][]) constructor's row/column convention.
                m = m.transpose();
                lcf.push(m);
                return 1;
            }
            return 0;
        }
    };

    /** Lua binary op: matrix + matrix. Pushes nothing unless both args are Matrix. */
    private static final JavaFunction matrixAdd = new JavaFunction() {
        @Override
        public int call(LuaCallFrame lcf, int i) {
            Object a = lcf.get(0);
            Object b = lcf.get(1);
            if (!(a instanceof Matrix) || !(b instanceof Matrix)) {
                return 0;
            } else {
                lcf.push(((Matrix) a).add((Matrix) b));
                return 1;
            }
        }
    };

    /** Lua binary op: matrix - matrix. Pushes nothing unless both args are Matrix. */
    private static final JavaFunction matrixSub = new JavaFunction() {
        @Override
        public int call(LuaCallFrame lcf, int i) {
            Object a = lcf.get(0);
            Object b = lcf.get(1);
            if (!(a instanceof Matrix) || !(b instanceof Matrix)) {
                return 0;
            } else {
                lcf.push(((Matrix)a).sub((Matrix)b));
                return 1;
            }
        }
    };

    /**
     * Lua function: matrix * matrix. NOTE(review): not wired to "__mul" in
     * the metatable below — only reachable via m:mul(other) through __index.
     */
    private static final JavaFunction matrixMultiply = new JavaFunction() {
        @Override
        public int call(LuaCallFrame lcf, int i) {
            Object a = lcf.get(0);
            Object b = lcf.get(1);
            if (!(a instanceof Matrix) || !(b instanceof Matrix)) {
                return 0;
            } else {
                lcf.push(((Matrix)a).multiply((Matrix)b));
                return 1;
            }
        }
    };

    /** Lua function: transform a Vector by a Matrix (matrix:transform(vec)). */
    private static final JavaFunction matrixTransform = new JavaFunction() {
        @Override
        public int call(LuaCallFrame lcf, int i) {
            Object a = lcf.get(0);
            Object b = lcf.get(1);
            if (!(a instanceof Matrix) || !(b instanceof Vector)) {
                return 0;
            } else {
                lcf.push(((Matrix)a).transform((Vector)b));
                return 1;
            }
        }
    };

    /**
     * Metatable __index handler for Matrix userdata. Dispatches on the key
     * type: number -> one column as a table; {col,row} table -> single cell;
     * string -> bound method lookup ("add"/"sub"/"mul"/"transform").
     */
    private static final JavaFunction matrixIndex = new JavaFunction() {
        @Override
        public int call(LuaCallFrame lcf, int i) {
            Matrix matrix = (Matrix) lcf.get(0);
            // retrieve one column
            if (lcf.get(1) instanceof Double) {
                int index = (int)(double)(Double)lcf.get(1);
                // Bounds are 0-based (0..columns-1); Lua callers normally use
                // 1-based indices — NOTE(review): confirm intended convention.
                if (index<0 || index>=matrix.getColumns()) {
                    return 0;
                }
                LuaTable tab = new LuaTableImpl(matrix.getRows());
                for (int y=0;y<matrix.getRows();y++) {
                    // NOTE(review): keys written from 0, but the reading code
                    // in decodeSubTable (and Lua convention) uses 1-based keys
                    // — looks like an off-by-one; verify with a Lua consumer.
                    tab.rawset(y, (Double)matrix.getValue(index, y));
                }
                lcf.push(tab);
                return 1;
            }
            if (lcf.get(1) instanceof LuaTable) {
                LuaTable indexTable = (LuaTable) lcf.get(1);
                if ((indexTable.rawget(1) instanceof Double) && (indexTable.rawget(2) instanceof Double)) {
                    int indexA = (int)(double)(Double)indexTable.rawget(1);
                    int indexB = (int)(double)(Double)indexTable.rawget(2);
                    // Out-of-range cell lookup yields nil rather than no result.
                    if (indexA<0 || indexB < 0 || indexA>=matrix.getColumns() || indexB>=matrix.getRows()) {
                        lcf.pushNil();
                    } else {
                        lcf.push(matrix.getValue(indexA, indexB));
                    }
                    return 1;
                } else {
                    return 0;
                }
            }
            if (lcf.get(1) instanceof String) {
                String cmd = (String) lcf.get(1);
                // Method-style access: m:add(...), m:sub(...), m:mul(...),
                // m:transform(...); unknown names yield nil.
                if (cmd.equals("add")) {
                    lcf.push(matrixAdd);
                } else if (cmd.equals("sub")) {
                    lcf.push(matrixSub);
                } else if (cmd.equals("mul")) {
                    lcf.push(matrixMultiply);
                } else if (cmd.equals("transform")) {
                    lcf.push(matrixTransform);
                } else {
                    lcf.pushNil();
                }
                return 1;
            }
            return 0;
        }
    };

    /** Lua function matrix.identity(n): n-by-n identity matrix. */
    private static final JavaFunction matrixCreateIdentity = new JavaFunction() {
        @Override
        public int call(LuaCallFrame lcf, int argCount) {
            if (argCount<1) return 0;
            if (!(lcf.get(0) instanceof Double)) return 0;
            int size = (int)(double)(Double)lcf.get(0);
            Matrix m = MatrixUtil.getIdentityMatrix(size);
            lcf.push(m);
            return 1;
        }
    };

    /** Lua function matrix.rotation(x, y, z): rotation matrix from three numbers. */
    private static final JavaFunction matrixCreateRotation = new JavaFunction() {
        @Override
        public int call(LuaCallFrame lcf, int argCount) {
            if (argCount<3) return 0;
            if (!(lcf.get(0) instanceof Double)) return 0;
            if (!(lcf.get(1) instanceof Double)) return 0;
            if (!(lcf.get(2) instanceof Double)) return 0;
            double x = (Double)lcf.get(0);
            double y = (Double)lcf.get(1);
            double z = (Double)lcf.get(2);
            Matrix m = MatrixUtil.getRotationMatrix(x,y,z);
            lcf.push(m);
            return 1;
        }
    };

    /** Lua function matrix.translation(x, y, z): translation matrix. */
    private static final JavaFunction matrixCreateTranslation = new JavaFunction() {
        @Override
        public int call(LuaCallFrame lcf, int argCount) {
            if (argCount < 3) {
                return 0;
            }
            if (!(lcf.get(0) instanceof Double)) {
                return 0;
            }
            if (!(lcf.get(1) instanceof Double)) {
                return 0;
            }
            if (!(lcf.get(2) instanceof Double)) {
                return 0;
            }
            double x = (Double) lcf.get(0);
            double y = (Double) lcf.get(1);
            double z = (Double) lcf.get(2);
            Matrix m = MatrixUtil.getTranslationMatrix(x, y, z);
            lcf.push(m);
            return 1;
        }
    };

    /**
     * Lua function matrix.scaling(...): either scaling(x, y, z) for per-axis
     * factors, or scaling(s) for uniform scale (expands to s, s, s).
     */
    private static final JavaFunction matrixCreateScaling = new JavaFunction() {
        @Override
        public int call(LuaCallFrame lcf, int argCount) {
            if ((argCount!=1) && (argCount!=3)) {
                return 0;
            }
            if (argCount == 3) {
                if (!(lcf.get(0) instanceof Double)) {
                    return 0;
                }
                if (!(lcf.get(1) instanceof Double)) {
                    return 0;
                }
                if (!(lcf.get(2) instanceof Double)) {
                    return 0;
                }
                double x = (Double) lcf.get(0);
                double y = (Double) lcf.get(1);
                double z = (Double) lcf.get(2);
                Matrix m = MatrixUtil.getScalingMatrix(x, y, z);
                lcf.push(m);
                return 1;
            } else {
                if (!(lcf.get(0) instanceof Double)) {
                    return 0;
                }
                double val = (Double) lcf.get(0);
                // Uniform scale: one factor applied to all three axes.
                Matrix m = MatrixUtil.getScalingMatrix(val, val, val);
                lcf.push(m);
                return 1;
            }
        }
    };

    // Shared metatable for all Matrix userdata.
    private static final LuaTable matrixMetatable;
    static{
        matrixMetatable = new LuaTableImpl();
        matrixMetatable.rawset("__tostring", LuaTools.toString);
        matrixMetatable.rawset("__index", matrixIndex);
        matrixMetatable.rawset("__add", matrixAdd);
        // NOTE(review): "__mul" is not registered even though matrixMultiply
        // exists; m1 * m2 in Lua will fail while m1:mul(m2) works.
        matrixMetatable.rawset("__sub", matrixSub);
    }

    /**
     * Installs the Matrix bindings into the given Lua state: the userdata
     * metatable, the global "Matrix" constructor, and the global "matrix"
     * factory table.
     *
     * @param state the Lua state to register the bindings in
     */
    public static void register(LuaState state) {
        state.setUserdataMetatable(Matrix.class, matrixMetatable);

        // Create the matrix-creation function table
        LuaTable matrix = new LuaTableImpl();
        matrix.rawset("identity", matrixCreateIdentity);
        matrix.rawset("rotation", matrixCreateRotation);
        matrix.rawset("translation", matrixCreateTranslation);
        matrix.rawset("scaling", matrixCreateScaling);

        state.getEnvironment().rawset("Matrix", createMatrix);
        state.getEnvironment().rawset("matrix", matrix);
    }
}
package com.redhat.ceylon.common.config;

import java.io.File;
import java.net.Proxy;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import com.redhat.ceylon.common.Constants;
import com.redhat.ceylon.common.FileUtil;

/**
 * Static accessors for the default tool options stored in the Ceylon
 * configuration. Every getter comes in two flavours: a no-argument version
 * that reads the process-wide {@link CeylonConfig#get()} and an overload
 * taking an explicitly supplied {@link CeylonConfig}.
 * <p>
 * This is a pure utility class and cannot be instantiated.
 */
public class DefaultToolOptions {
    
    public final static String DEFAULTS_ENCODING = "defaults.encoding";
    public final static String DEFAULTS_OFFLINE = "defaults.offline";
    public final static String DEFAULTS_TIMEOUT = "defaults.timeout";
    // BACKWARDS-COMPAT
    public final static String DEFAULTS_MAVENOVERRIDES = "defaults.mavenoverrides";
    public final static String DEFAULTS_OVERRIDES = "defaults.overrides";
    public final static String DEFAULTS_FLAT_CLASSPATH = "defaults.flatclasspath";
    /** Option key controlling auto-export of Maven dependencies (correct spelling). */
    public final static String DEFAULTS_AUTO_EXPORT_MAVEN_DEPENDENCIES = "defaults.autoexportmavendependencies";
    /**
     * @deprecated Misspelled constant name ("EPORT"); kept only for backwards
     *             compatibility. Use {@link #DEFAULTS_AUTO_EXPORT_MAVEN_DEPENDENCIES}.
     */
    @Deprecated
    public final static String DEFAULTS_AUTO_EPORT_MAVEN_DEPENDENCIES = DEFAULTS_AUTO_EXPORT_MAVEN_DEPENDENCIES;
    
    public final static String COMPILER_SOURCE = "compiler.source";
    public final static String COMPILER_RESOURCE = "compiler.resource";
    public final static String COMPILER_RESOURCE_ROOT = "compiler.resourceroot";
    public final static String COMPILER_SCRIPT = "compiler.script";
    public final static String COMPILER_DOC = "compiler.doc";
    public final static String COMPILER_SUPPRESSWARNING = "compiler.suppresswarning";
    public final static String COMPILER_NOOSGI = "compiler.noosgi";
    public final static String COMPILER_OSGIPROVIDEDBUNDLES = "compiler.osgiprovidedbundles";
    public final static String COMPILER_NOPOM = "compiler.nopom";
    public final static String COMPILER_PACK200 = "compiler.pack200";
    
    public final static String RUNTOOL_COMPILE = "runtool.compile";
    public final static String TESTTOOL_COMPILE = "testtool.compile";
    
    private DefaultToolOptions() {
        // utility class, not instantiable
    }
    
    /** Returns the default source-file encoding, or {@code null} when unset. */
    public static String getDefaultEncoding() {
        return getDefaultEncoding(CeylonConfig.get());
    }
    
    public static String getDefaultEncoding(CeylonConfig config) {
        return config.getOption(DEFAULTS_ENCODING);
    }
    
    /** Returns whether tools should run in offline mode (default {@code false}). */
    public static boolean getDefaultOffline() {
        return getDefaultOffline(CeylonConfig.get());
    }
    
    public static boolean getDefaultOffline(CeylonConfig config) {
        return config.getBoolOption(DEFAULTS_OFFLINE, false);
    }
    
    /** Returns the network timeout in milliseconds (falls back to {@link Constants#DEFAULT_TIMEOUT}). */
    public static long getDefaultTimeout() {
        return getDefaultTimeout(CeylonConfig.get());
    }
    
    public static long getDefaultTimeout(CeylonConfig config) {
        return timeoutFromString(config.getOption(DEFAULTS_TIMEOUT), Constants.DEFAULT_TIMEOUT);
    }
    
    /**
     * Parses a timeout string to milliseconds. A bare number is interpreted as
     * seconds; a number with an {@code "ms"} suffix is taken as milliseconds.
     *
     * @param num the string to parse, may be {@code null}
     * @param defaultTimeout value returned when {@code num} is {@code null}
     * @return the timeout in milliseconds
     * @throws NumberFormatException if {@code num} is not a valid integer
     */
    public static int timeoutFromString(String num, int defaultTimeout) {
        if (num != null) {
            int fact = 1000;
            if (num.endsWith("ms")) {
                num = num.substring(0, num.length() - 2);
                fact = 1;
            }
            return Integer.parseInt(num) * fact;
        } else {
            return defaultTimeout;
        }
    }
    
    /** Returns the proxy configured for network access, derived from the authentication section. */
    public static Proxy getDefaultProxy() {
        return getDefaultProxy(CeylonConfig.get());
    }
    
    public static Proxy getDefaultProxy(CeylonConfig config) {
        Authentication auth = Authentication.fromConfig(config);
        return auth.getProxy();
    }
    
    /** Returns the module overrides file, falling back to the legacy Maven-overrides key. */
    public static String getDefaultOverrides() {
        return getDefaultOverrides(CeylonConfig.get());
    }
    
    public static String getDefaultOverrides(CeylonConfig config) {
        String ov = config.getOption(DEFAULTS_OVERRIDES);
        if(ov != null)
            return ov;
        // backwards compat
        return config.getOption(DEFAULTS_MAVENOVERRIDES);
    }
    
    /** Returns whether a flat classpath should be used (default {@code false}). */
    public static boolean getDefaultFlatClasspath() {
        return getDefaultFlatClasspath(CeylonConfig.get());
    }
    
    public static boolean getDefaultFlatClasspath(CeylonConfig config) {
        return config.getBoolOption(DEFAULTS_FLAT_CLASSPATH, false);
    }
    
    /** Returns whether Maven dependencies are auto-exported (default {@code false}). */
    public static boolean getDefaultAutoExportMavenDependencies() {
        return getDefaultAutoExportMavenDependencies(CeylonConfig.get());
    }
    
    public static boolean getDefaultAutoExportMavenDependencies(CeylonConfig config) {
        return config.getBoolOption(DEFAULTS_AUTO_EXPORT_MAVEN_DEPENDENCIES, false);
    }
    
    /** Returns the configured source directories, or the single default source dir. */
    public static List<File> getCompilerSourceDirs() {
        return getCompilerSourceDirs(CeylonConfig.get());
    }
    
    public static List<File> getCompilerSourceDirs(CeylonConfig config) {
        String[] dirs = config.getOptionValues(COMPILER_SOURCE);
        if (dirs != null) {
            return Arrays.asList(FileUtil.pathsToFileArray(dirs));
        } else {
            return Collections.singletonList(new File(Constants.DEFAULT_SOURCE_DIR));
        }
    }
    
    /** Returns the configured resource directories, or the single default resource dir. */
    public static List<File> getCompilerResourceDirs() {
        return getCompilerResourceDirs(CeylonConfig.get());
    }
    
    public static List<File> getCompilerResourceDirs(CeylonConfig config) {
        String[] dirs = config.getOptionValues(COMPILER_RESOURCE);
        if (dirs != null) {
            return Arrays.asList(FileUtil.pathsToFileArray(dirs));
        } else {
            return Collections.singletonList(new File(Constants.DEFAULT_RESOURCE_DIR));
        }
    }
    
    /** Returns the configured script directories, or the single default script dir. */
    public static List<File> getCompilerScriptDirs() {
        return getCompilerScriptDirs(CeylonConfig.get());
    }
    
    public static List<File> getCompilerScriptDirs(CeylonConfig config) {
        String[] dirs = config.getOptionValues(COMPILER_SCRIPT);
        if (dirs != null) {
            return Arrays.asList(FileUtil.pathsToFileArray(dirs));
        } else {
            return Collections.singletonList(new File(Constants.DEFAULT_SCRIPT_DIR));
        }
    }
    
    /** Returns the name of the resource root folder (defaults to {@link Constants#DEFAULT_RESOURCE_ROOT}). */
    public static String getCompilerResourceRootName() {
        return getCompilerResourceRootName(CeylonConfig.get());
    }
    
    public static String getCompilerResourceRootName(CeylonConfig config) {
        return config.getOption(COMPILER_RESOURCE_ROOT, Constants.DEFAULT_RESOURCE_ROOT);
    }
    
    /** Returns the configured documentation directories, or the single default doc dir. */
    public static List<File> getCompilerDocDirs() {
        return getCompilerDocDirs(CeylonConfig.get());
    }
    
    public static List<File> getCompilerDocDirs(CeylonConfig config) {
        String[] dirs = config.getOptionValues(COMPILER_DOC);
        if (dirs != null) {
            return Arrays.asList(FileUtil.pathsToFileArray(dirs));
        } else {
            return Collections.singletonList(new File(Constants.DEFAULT_DOC_DIR));
        }
    }
    
    /** Returns the URL of the output repository configured for the compiler. */
    public static String getCompilerOutputRepo() {
        return getCompilerOutputRepo(CeylonConfig.get());
    }
    
    public static String getCompilerOutputRepo(CeylonConfig config) {
        return Repositories.withConfig(config).getOutputRepository().getUrl();
    }
    
    /** Returns the configured compiler warning suppressions, or {@code null} when unset. */
    public static List<String> getCompilerSuppressWarnings() {
        return getCompilerSuppressWarnings(CeylonConfig.get());
    }
    
    public static List<String> getCompilerSuppressWarnings(CeylonConfig config) {
        String[] warnings = config.getOptionValues(COMPILER_SUPPRESSWARNING);
        if (warnings != null) {
            return Arrays.asList(warnings);
        } else {
            return null;
        }
    }
    
    /** Returns whether OSGi metadata generation is disabled (default {@code false}). */
    public static boolean getCompilerNoOsgi() {
        return getCompilerNoOsgi(CeylonConfig.get());
    }
    
    public static boolean getCompilerNoOsgi(CeylonConfig config) {
        return config.getBoolOption(COMPILER_NOOSGI, false);
    }
    
    /** Returns the list of OSGi bundles provided by the container (default empty string). */
    public static String getCompilerOsgiProvidedBundles() {
        return getCompilerOsgiProvidedBundles(CeylonConfig.get());
    }
    
    public static String getCompilerOsgiProvidedBundles(CeylonConfig config) {
        return config.getOption(COMPILER_OSGIPROVIDEDBUNDLES, "");
    }
    
    /** Returns whether POM generation is disabled (default {@code false}). */
    public static boolean getCompilerNoPom() {
        return getCompilerNoPom(CeylonConfig.get());
    }
    
    public static boolean getCompilerNoPom(CeylonConfig config) {
        return config.getBoolOption(COMPILER_NOPOM, false);
    }
    
    /** Returns whether pack200 compression is enabled (default {@code false}). */
    public static boolean getCompilerPack200() {
        return getCompilerPack200(CeylonConfig.get());
    }
    
    public static boolean getCompilerPack200(CeylonConfig config) {
        return config.getBoolOption(COMPILER_PACK200, false);
    }
    
    /** Returns the compilation flags used by the run tool (falls back to the constant default). */
    public static String getRunToolCompileFlags() {
        return getRunToolCompileFlags(CeylonConfig.get());
    }
    
    public static String getRunToolCompileFlags(CeylonConfig config) {
        return config.getOption(RUNTOOL_COMPILE, Constants.DEFAULT_RUNTOOL_COMPILATION_FLAGS);
    }
    
    /** Returns the compilation flags used by the test tool (falls back to the constant default). */
    public static String getTestToolCompileFlags() {
        return getTestToolCompileFlags(CeylonConfig.get());
    }
    
    public static String getTestToolCompileFlags(CeylonConfig config) {
        return config.getOption(TESTTOOL_COMPILE, Constants.DEFAULT_TESTTOOL_COMPILATION_FLAGS);
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.cosmos.implementation.caches;

import com.azure.cosmos.BridgeInternal;
import com.azure.cosmos.implementation.CosmosClientMetadataCachesSnapshot;
import com.azure.cosmos.implementation.MetadataDiagnosticsContext;
import com.azure.cosmos.implementation.Utils;
import com.azure.cosmos.implementation.apachecommons.lang.StringUtils;
import com.azure.cosmos.implementation.routing.PartitionKeyRangeIdentity;
import com.azure.cosmos.implementation.DocumentCollection;
import com.azure.cosmos.implementation.InvalidPartitionException;
import com.azure.cosmos.implementation.NotFoundException;
import com.azure.cosmos.implementation.PathsHelper;
import com.azure.cosmos.implementation.RMResources;
import com.azure.cosmos.implementation.ResourceId;
import com.azure.cosmos.implementation.RxDocumentServiceRequest;
import com.azure.cosmos.models.ModelBridgeInternal;
import reactor.core.Exceptions;
import reactor.core.publisher.Mono;

import java.util.Map;

/**
 * Caches {@link DocumentCollection} metadata, keyed both by collection name
 * (full resource path) and by collection resource id (rid).
 *
 * While this class is public, it is not part of our published public APIs.
 * This is meant to be internally used only by our sdk.
 */
public abstract class RxCollectionCache {

    // Two views of the same metadata: name -> collection and rid -> collection.
    // Name entries are populated on demand; every name lookup also refreshes
    // the corresponding rid entry (see resolveByNameAsync/refresh).
    private final AsyncCache<String, DocumentCollection> collectionInfoByNameCache;
    private final AsyncCache<String, DocumentCollection> collectionInfoByIdCache;

    /** Serializes both caches into the given snapshot (used for client warm-up). */
    public static void serialize(CosmosClientMetadataCachesSnapshot clientMetadataCachesSnapshot, RxCollectionCache cache) {
        clientMetadataCachesSnapshot.serializeCollectionInfoByIdCache(cache.collectionInfoByIdCache);
        clientMetadataCachesSnapshot.serializeCollectionInfoByNameCache(cache.collectionInfoByNameCache);
    }

    protected RxCollectionCache(AsyncCache<String, DocumentCollection> collectionInfoByNameCache,
                                AsyncCache<String, DocumentCollection> collectionInfoByIdCache) {
        this.collectionInfoByNameCache = collectionInfoByNameCache;
        this.collectionInfoByIdCache = collectionInfoByIdCache;
    }

    protected RxCollectionCache() {
        this(new AsyncCache<>(new CollectionRidComparer()), new AsyncCache<>(new CollectionRidComparer()));
    }

    /**
     * Resolves a request to a collection in a sticky manner.
     * Unless request.ForceNameCacheRefresh is equal to true, it will return the same collection.
     * @param request Request to resolve.
     * @return an instance of Single&lt;DocumentCollection&gt;
     */
    public Mono<Utils.ValueHolder<DocumentCollection>> resolveCollectionAsync(
        MetadataDiagnosticsContext metaDataDiagnosticsContext,
        RxDocumentServiceRequest request) {
        // Mono Void to represent only terminal events specifically complete and error
        Mono<Void> init = null;
        if (request.getIsNameBased()) {
            if (request.isForceNameCacheRefresh()) {
                // Refresh first, then clear the force flag so subsequent retries are sticky.
                Mono<Void> mono = this.refreshAsync(metaDataDiagnosticsContext, request);
                init = mono.then(Mono.fromRunnable(() -> request.setForceNameCacheRefresh(false)));
            }

            Mono<Utils.ValueHolder<DocumentCollection>> collectionInfoObs = this.resolveByPartitionKeyRangeIdentityAsync(
                BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics),
                request.getPartitionKeyRangeIdentity(),
                request.properties);

            if (init != null) {
                collectionInfoObs = init.then(collectionInfoObs);
            }

            return collectionInfoObs.flatMap(collectionValueHolder -> {
                if (collectionValueHolder.v != null) {
                    return Mono.just(collectionValueHolder);
                }
                if (request.requestContext.resolvedCollectionRid == null) {
                    // First resolution for this request: look up by name and remember
                    // the resolved rid on the request for stickiness.
                    Mono<DocumentCollection> collectionInfoRes = this.resolveByNameAsync(
                        metaDataDiagnosticsContext, request.getResourceAddress(), request.properties);

                    return collectionInfoRes.flatMap(collection -> {
                        // TODO: how to async log this?
                        //                      logger.debug(
                        //                          "Mapped resourceName {} to getResourceId {}.",
                        //                          request.getResourceAddress(),
                        //                          collectionInfo.getResourceId());

                        request.setResourceId(collection.getResourceId());
                        request.requestContext.resolvedCollectionRid = collection.getResourceId();
                        return Mono.just(new Utils.ValueHolder<>(collection));
                    });
                } else {
                    // Sticky path: reuse the previously resolved rid.
                    return this.resolveByRidAsync(
                        metaDataDiagnosticsContext, request.requestContext.resolvedCollectionRid, request.properties);
                }
            });
        } else {
            // Rid-based request: the resource address itself is (or contains) the rid.
            return resolveByPartitionKeyRangeIdentityAsync(
                metaDataDiagnosticsContext, request.getPartitionKeyRangeIdentity(), request.properties)
                .flatMap(collectionValueHolder -> {
                    if (collectionValueHolder.v != null) {
                        return Mono.just(collectionValueHolder);
                    }
                    return this.resolveByRidAsync(
                        metaDataDiagnosticsContext, request.getResourceAddress(), request.properties);
                });
        }
    }

    /**
     * Refreshes the by-name cache entry for the given resource address.
     * This method is only used in retry policy as it doesn't have request handy.
     *
     * @param metaDataDiagnosticsContext diagnostics context for metadata calls
     * @param resourceAddress name-based address of the collection to refresh
     * @param properties request properties forwarded to the backend lookup
     */
    public void refresh(MetadataDiagnosticsContext metaDataDiagnosticsContext, String resourceAddress, Map<String, Object> properties) {
        if (PathsHelper.isNameBased(resourceAddress)) {
            String resourceFullName = PathsHelper.getCollectionPath(resourceAddress);

            this.collectionInfoByNameCache.refresh(
                resourceFullName,
                () -> {
                    Mono<DocumentCollection> collectionObs = this.getByNameAsync(metaDataDiagnosticsContext, resourceFullName, properties);
                    // Keep the rid cache consistent with whatever the name lookup returns.
                    return collectionObs.doOnSuccess(collection -> this.collectionInfoByIdCache.set(collection.getResourceId(), collection));
                });
        }
    }

    /** Backend lookup of a collection by rid; implemented by the concrete cache. */
    protected abstract Mono<DocumentCollection> getByRidAsync(MetadataDiagnosticsContext metaDataDiagnosticsContext, String collectionRid, Map<String, Object> properties);

    /** Backend lookup of a collection by full name; implemented by the concrete cache. */
    protected abstract Mono<DocumentCollection> getByNameAsync(MetadataDiagnosticsContext metaDataDiagnosticsContext, String resourceAddress, Map<String, Object> properties);

    private Mono<Utils.ValueHolder<DocumentCollection>> resolveByPartitionKeyRangeIdentityAsync(MetadataDiagnosticsContext metaDataDiagnosticsContext, PartitionKeyRangeIdentity partitionKeyRangeIdentity, Map<String, Object> properties) {
        // if request is targeted at specific partition using x-ms-documentd-partitionkeyrangeid header,
        // which contains value "<collectionrid>,<partitionkeyrangeid>", then resolve to collection rid in this header.
        if (partitionKeyRangeIdentity != null && partitionKeyRangeIdentity.getCollectionRid() != null) {
            return this.resolveByRidAsync(metaDataDiagnosticsContext, partitionKeyRangeIdentity.getCollectionRid(), properties)
                .onErrorResume(e -> {
                    Throwable unwrappedException = Exceptions.unwrap(e);
                    if (unwrappedException instanceof NotFoundException) {
                        // This is signal to the upper logic either to refresh
                        // collection cache and retry.
                        return Mono.error(new InvalidPartitionException(RMResources.InvalidDocumentCollection));
                    }
                    return Mono.error(unwrappedException);
                });
        }
        // No partition-key-range header: caller must resolve by name/rid instead.
        return Mono.just(new Utils.ValueHolder<>(null));
    }

    /** Resolves a collection by resource id, consulting the rid cache first. */
    public Mono<Utils.ValueHolder<DocumentCollection>> resolveByRidAsync(
        MetadataDiagnosticsContext metaDataDiagnosticsContext,
        String resourceId, Map<String, Object> properties) {

        ResourceId resourceIdParsed = ResourceId.parse(resourceId);
        String collectionResourceId = resourceIdParsed.getDocumentCollectionId().toString();

        Mono<DocumentCollection> async = this.collectionInfoByIdCache.getAsync(
            collectionResourceId,
            null,
            () -> this.getByRidAsync(metaDataDiagnosticsContext, collectionResourceId, properties));
        return async.map(Utils.ValueHolder::new);
    }

    /** Resolves a collection by its name-based address, consulting the name cache first. */
    public Mono<DocumentCollection> resolveByNameAsync(
        MetadataDiagnosticsContext metaDataDiagnosticsContext,
        String resourceAddress, Map<String, Object> properties) {

        return this.resolveByNameAsync(metaDataDiagnosticsContext, resourceAddress, properties, null);
    }

    /**
     * Resolves a collection by its name-based address. When {@code obsoleteValue}
     * is non-null, a cached entry equal to it is considered stale and bypassed.
     */
    public Mono<DocumentCollection> resolveByNameAsync(
        MetadataDiagnosticsContext metaDataDiagnosticsContext,
        String resourceAddress, Map<String, Object> properties, DocumentCollection obsoleteValue) {

        String resourceFullName = PathsHelper.getCollectionPath(resourceAddress);

        return this.collectionInfoByNameCache.getAsync(
            resourceFullName,
            obsoleteValue,
            () -> {
                Mono<DocumentCollection> collectionObs = this.getByNameAsync(
                    metaDataDiagnosticsContext, resourceFullName, properties);
                // Keep the rid cache consistent with the freshly fetched collection.
                return collectionObs.doOnSuccess(collection -> this.collectionInfoByIdCache.set(
                    collection.getResourceId(), collection));
            });
    }

    /**
     * Refreshes the name cache entry for the request's collection, then clears
     * the request's resolvedCollectionRid so the next resolution starts fresh.
     */
    public Mono<Void> refreshAsync(MetadataDiagnosticsContext metaDataDiagnosticsContext, RxDocumentServiceRequest request) {
        // TODO System.Diagnostics.Debug.Assert(request.IsNameBased);

        String resourceFullName = PathsHelper.getCollectionPath(request.getResourceAddress());
        Mono<Void> mono;

        if (request.requestContext.resolvedCollectionRid != null) {
            // Here we will issue backend call only if cache wasn't already refreshed (if whatever is there corresponds to previously resolved collection rid).
            DocumentCollection obsoleteValue = new DocumentCollection();
            ModelBridgeInternal.setResourceId(obsoleteValue, request.requestContext.resolvedCollectionRid);

            mono = this.collectionInfoByNameCache.getAsync(
                resourceFullName,
                obsoleteValue,
                () -> {
                    Mono<DocumentCollection> collectionObs = this.getByNameAsync(metaDataDiagnosticsContext, resourceFullName, request.properties);
                    return collectionObs.doOnSuccess(collection -> {
                        this.collectionInfoByIdCache.set(collection.getResourceId(), collection);
                    });
                }).then();
        } else {
            // In case of ForceRefresh directive coming from client, there will be no ResolvedCollectionRid, so we
            // need to refresh unconditionally.
            mono = Mono.fromRunnable(() -> this.refresh(metaDataDiagnosticsContext, request.getResourceAddress(), request.properties));
        }

        return mono.doOnSuccess(aVoid -> request.requestContext.resolvedCollectionRid = null);
    }

    /** Equality by resource id only; used to detect stale cache entries. */
    private static class CollectionRidComparer implements IEqualityComparer<DocumentCollection> {
        // Fixed: was the lowercase literal "1l", which reads as "11".
        private static final long serialVersionUID = 1L;

        public boolean areEqual(DocumentCollection left, DocumentCollection right) {
            if (left == null && right == null) {
                return true;
            }
            if ((left == null) ^ (right == null)) {
                return false;
            }
            return StringUtils.equals(left.getResourceId(), right.getResourceId());
        }
    }
}
/*
 * Copyright 2010-2012 Luca Garulli (l.garulli(at)orientechnologies.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.orientechnologies.orient.core.db.document;

import java.util.*;

import com.orientechnologies.common.exception.OException;
import com.orientechnologies.common.log.OLogManager;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.db.ODatabaseComplex;
import com.orientechnologies.orient.core.db.ODatabaseRecordWrapperAbstract;
import com.orientechnologies.orient.core.db.record.ODatabaseRecordTx;
import com.orientechnologies.orient.core.db.record.ridbag.sbtree.OSBTreeCollectionManager;
import com.orientechnologies.orient.core.exception.OConcurrentModificationException;
import com.orientechnologies.orient.core.exception.ODatabaseException;
import com.orientechnologies.orient.core.exception.OValidationException;
import com.orientechnologies.orient.core.id.OClusterPosition;
import com.orientechnologies.orient.core.index.OIndex;
import com.orientechnologies.orient.core.index.OIndexAbstract;
import com.orientechnologies.orient.core.iterator.ORecordIteratorClass;
import com.orientechnologies.orient.core.iterator.ORecordIteratorCluster;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.security.ODatabaseSecurityResources;
import com.orientechnologies.orient.core.metadata.security.ORole;
import com.orientechnologies.orient.core.record.ORecordInternal;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.storage.ORecordCallback;
import com.orientechnologies.orient.core.storage.impl.local.OFreezableStorage;
import com.orientechnologies.orient.core.version.ORecordVersion;

/**
 * Document-oriented database facade wrapping an {@link ODatabaseRecordTx}.
 * Adds class/cluster security checks, schema validation and freeze/release
 * support on top of the underlying record database.
 */
@SuppressWarnings("unchecked")
public class ODatabaseDocumentTx extends ODatabaseRecordWrapperAbstract<ODatabaseRecordTx> implements ODatabaseDocument {

  public ODatabaseDocumentTx(final String iURL) {
    super(new ODatabaseRecordTx(iURL, ODocument.RECORD_TYPE));
  }

  public ODatabaseDocumentTx(final ODatabaseRecordTx iSource) {
    super(iSource);
  }

  /**
   * Returns true when the underlying storage supports freeze/release.
   * Otherwise logs an explanatory error (remote storages cannot be frozen)
   * and returns false so the caller can bail out.
   */
  private boolean checkFreezableStorage(final String iOperation) {
    if (!(getStorage() instanceof OFreezableStorage)) {
      OLogManager.instance().error(this,
          "We can not " + iOperation + " non local storage. " + "If you use remote client please use OServerAdmin instead.");
      return false;
    }
    return true;
  }

  // Freezes each index; null-safe because prepareIndexesToFreeze may return null.
  private void freezeIndexes(final List<OIndexAbstract<?>> indexesToFreeze, boolean throwException) {
    if (indexesToFreeze != null) {
      for (OIndexAbstract<?> indexToLock : indexesToFreeze) {
        indexToLock.freeze(throwException);
      }
    }
  }

  // Flushes each index to storage. Null-safe: prepareIndexesToFreeze returns
  // null when there are no indexes, which previously caused an NPE here.
  private void flushIndexes(List<OIndexAbstract<?>> indexesToFlush) {
    if (indexesToFlush != null) {
      for (OIndexAbstract<?> index : indexesToFlush) {
        index.flush();
      }
    }
  }

  /**
   * Collects the internal index implementations sorted by name (for a stable
   * lock order), or null when there are no indexes.
   */
  private List<OIndexAbstract<?>> prepareIndexesToFreeze(Collection<? extends OIndex<?>> indexes) {
    List<OIndexAbstract<?>> indexesToFreeze = null;
    if (indexes != null && !indexes.isEmpty()) {
      indexesToFreeze = new ArrayList<OIndexAbstract<?>>(indexes.size());
      for (OIndex<?> index : indexes) {
        indexesToFreeze.add((OIndexAbstract<?>) index.getInternal());
      }

      // Sort by name so concurrent freezers always acquire index locks in the same order.
      Collections.sort(indexesToFreeze, new Comparator<OIndex<?>>() {
        public int compare(OIndex<?> o1, OIndex<?> o2) {
          return o1.getName().compareTo(o2.getName());
        }
      });
    }
    return indexesToFreeze;
  }

  // Releases every index and drains the collection while iterating.
  private void releaseIndexes(Collection<? extends OIndex<?>> indexesToRelease) {
    if (indexesToRelease != null) {
      Iterator<? extends OIndex<?>> it = indexesToRelease.iterator();
      while (it.hasNext()) {
        it.next().getInternal().release();
        it.remove();
      }
    }
  }

  // Shared freeze prologue: lock all indexes (with the given freeze mode) and flush them.
  private void freezeAndFlushIndexes(final boolean iThrowException) {
    final Collection<? extends OIndex<?>> indexes = getMetadata().getIndexManager().getIndexes();
    final List<OIndexAbstract<?>> indexesToLock = prepareIndexesToFreeze(indexes);
    freezeIndexes(indexesToLock, iThrowException);
    flushIndexes(indexesToLock);
  }

  @Override
  public void freeze(final boolean throwException) {
    if (!checkFreezableStorage("freeze"))
      return;

    final long startTime = Orient.instance().getProfiler().startChrono();

    // NOTE: indexes are always frozen with throwException=true in this variant
    // (preserved from the original behavior); the parameter only affects the storage freeze.
    freezeAndFlushIndexes(true);

    super.freeze(throwException);

    Orient.instance().getProfiler()
        .stopChrono("db." + getName() + ".freeze", "Time to freeze the database", startTime, "db.*.freeze");
  }

  @Override
  public void freeze() {
    if (!checkFreezableStorage("freeze"))
      return;

    final long startTime = Orient.instance().getProfiler().startChrono();

    freezeAndFlushIndexes(false);

    super.freeze();

    Orient.instance().getProfiler()
        .stopChrono("db." + getName() + ".freeze", "Time to freeze the database", startTime, "db.*.freeze");
  }

  @Override
  public void release() {
    if (!checkFreezableStorage("release"))
      return;

    final long startTime = Orient.instance().getProfiler().startChrono();

    super.release();

    Collection<? extends OIndex<?>> indexes = getMetadata().getIndexManager().getIndexes();
    releaseIndexes(indexes);

    Orient.instance().getProfiler()
        .stopChrono("db." + getName() + ".release", "Time to release the database", startTime, "db.*.release");
  }

  /**
   * Creates a new ODocument.
   */
  @Override
  public ODocument newInstance() {
    return new ODocument();
  }

  /** Creates a new ODocument of the given class, after a CREATE security check. */
  public ODocument newInstance(final String iClassName) {
    checkSecurity(ODatabaseSecurityResources.CLASS, ORole.PERMISSION_CREATE, iClassName);
    return new ODocument(iClassName);
  }

  /** Browses all documents of the given class, including subclasses. */
  public ORecordIteratorClass<ODocument> browseClass(final String iClassName) {
    return browseClass(iClassName, true);
  }

  /**
   * Browses all documents of the given class.
   *
   * @param iPolymorphic when true, documents of subclasses are included too
   * @throws IllegalArgumentException when the class does not exist in the schema
   */
  public ORecordIteratorClass<ODocument> browseClass(final String iClassName, final boolean iPolymorphic) {
    if (getMetadata().getSchema().getClass(iClassName) == null)
      throw new IllegalArgumentException("Class '" + iClassName + "' not found in current database");

    checkSecurity(ODatabaseSecurityResources.CLASS, ORole.PERMISSION_READ, iClassName);

    return new ORecordIteratorClass<ODocument>(this, underlying, iClassName, iPolymorphic, true, false);
  }

  /** Browses all documents stored in the given cluster. */
  @Override
  public ORecordIteratorCluster<ODocument> browseCluster(final String iClusterName) {
    checkSecurity(ODatabaseSecurityResources.CLUSTER, ORole.PERMISSION_READ, iClusterName);

    return new ORecordIteratorCluster<ODocument>(this, underlying, getClusterIdByName(iClusterName), true);
  }

  /** Browses a cluster range, optionally including tombstones (deleted records). */
  @Override
  public ORecordIteratorCluster<ODocument> browseCluster(String iClusterName, OClusterPosition startClusterPosition,
      OClusterPosition endClusterPosition, boolean loadTombstones) {
    checkSecurity(ODatabaseSecurityResources.CLUSTER, ORole.PERMISSION_READ, iClusterName);

    return new ORecordIteratorCluster<ODocument>(this, underlying, getClusterIdByName(iClusterName), startClusterPosition,
        endClusterPosition, true, loadTombstones);
  }

  /**
   * Saves a document to the database. Behavior depends by the current running transaction if any. If no transaction is running then
   * changes apply immediately. If an Optimistic transaction is running then the record will be changed at commit time. The current
   * transaction will continue to see the record as modified, while others not. If a Pessimistic transaction is running, then an
   * exclusive lock is acquired against the record. Current transaction will continue to see the record as modified, while others
   * cannot access to it since it's locked.
   * <p/>
   * If MVCC is enabled and the version of the document is different by the version stored in the database, then a
   * {@link OConcurrentModificationException} exception is thrown. Before to save the document it must be valid following the
   * constraints declared in the schema if any (can work also in schema-less mode). To validate the document the
   * {@link ODocument#validate()} is called.
   *
   * @param iRecord
   *          Record to save.
   * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
   * @throws OConcurrentModificationException
   *           if the version of the document is different by the version contained in the database.
   * @throws OValidationException
   *           if the document breaks some validation constraints defined in the schema
   * @see #setMVCC(boolean), {@link #isMVCC()}
   */
  @Override
  public <RET extends ORecordInternal<?>> RET save(final ORecordInternal<?> iRecord) {
    return (RET) save(iRecord, OPERATION_MODE.SYNCHRONOUS, false, null, null);
  }

  /**
   * Saves a document to the database with explicit mode and callbacks. See
   * {@link #save(ORecordInternal)} for transaction and MVCC semantics.
   *
   * @param iRecord
   *          Record to save.
   * @param iForceCreate
   *          Flag that indicates that record should be created. If record with current rid already exists, exception is thrown
   * @param iRecordCreatedCallback callback invoked when the record is created
   * @param iRecordUpdatedCallback callback invoked when the record is updated
   * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
   * @throws OConcurrentModificationException
   *           if the version of the document is different by the version contained in the database.
   * @throws OValidationException
   *           if the document breaks some validation constraints defined in the schema
   * @see #setMVCC(boolean), {@link #isMVCC()}
   */
  @Override
  public <RET extends ORecordInternal<?>> RET save(final ORecordInternal<?> iRecord, final OPERATION_MODE iMode,
      boolean iForceCreate, final ORecordCallback<? extends Number> iRecordCreatedCallback,
      ORecordCallback<ORecordVersion> iRecordUpdatedCallback) {
    // Non-document records bypass validation and class security entirely.
    if (!(iRecord instanceof ODocument))
      return (RET) super.save(iRecord, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback);

    ODocument doc = (ODocument) iRecord;
    doc.validate();
    doc.convertAllMultiValuesToTrackedVersions();

    try {
      if (iForceCreate || doc.getIdentity().isNew()) {
        // NEW RECORD
        if (doc.getClassName() != null)
          checkSecurity(ODatabaseSecurityResources.CLASS, ORole.PERMISSION_CREATE, doc.getClassName());

        if (doc.getSchemaClass() != null && doc.getIdentity().getClusterId() < 0) {
          // CLASS FOUND: FORCE THE STORING IN THE CLUSTER CONFIGURED
          String clusterName = getClusterNameById(doc.getSchemaClass().getDefaultClusterId());

          return (RET) super.save(doc, clusterName, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback);
        }
      } else {
        // UPDATE: CHECK ACCESS ON SCHEMA CLASS NAME (IF ANY)
        if (doc.getClassName() != null)
          checkSecurity(ODatabaseSecurityResources.CLASS, ORole.PERMISSION_UPDATE, doc.getClassName());
      }

      doc = super.save(doc, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback);
    } catch (OException e) {
      // PASS THROUGH
      throw e;
    } catch (Exception e) {
      // OLogManager.exception() wraps and throws an ODatabaseException.
      OLogManager.instance().exception("Error on saving record %s of class '%s'", e, ODatabaseException.class,
          iRecord.getIdentity(), (doc.getClassName() != null ? doc.getClassName() : "?"));
    }
    return (RET) doc;
  }

  /**
   * Saves a document specifying a cluster where to store the record. See
   * {@link #save(ORecordInternal)} for transaction and MVCC semantics.
   *
   * @param iRecord
   *          Record to save
   * @param iClusterName
   *          Cluster name where to save the record
   * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
   * @throws OConcurrentModificationException
   *           if the version of the document is different by the version contained in the database.
   * @throws OValidationException
   *           if the document breaks some validation constraints defined in the schema
   * @see #setMVCC(boolean), {@link #isMVCC()}, ORecordSchemaAware#validate()
   */
  @Override
  public <RET extends ORecordInternal<?>> RET save(final ORecordInternal<?> iRecord, final String iClusterName) {
    return (RET) save(iRecord, iClusterName, OPERATION_MODE.SYNCHRONOUS, false, null, null);
  }

  /**
   * Saves a document into a specific cluster with explicit mode and callbacks.
   * Validates that the target cluster is among the clusters configured for the
   * document's schema class. See {@link #save(ORecordInternal)} for transaction
   * and MVCC semantics.
   *
   * @param iRecord
   *          Record to save
   * @param iClusterName
   *          Cluster name where to save the record
   * @param iMode
   *          Mode of save: synchronous (default) or asynchronous
   * @param iForceCreate
   *          Flag that indicates that record should be created. If record with current rid already exists, exception is thrown
   * @param iRecordCreatedCallback callback invoked when the record is created
   * @param iRecordUpdatedCallback callback invoked when the record is updated
   * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
   * @throws OConcurrentModificationException
   *           if the version of the document is different by the version contained in the database.
   * @throws OValidationException
   *           if the document breaks some validation constraints defined in the schema
   * @throws IllegalArgumentException
   *           if the cluster does not exist or is not configured for the document's class
   * @see #setMVCC(boolean), {@link #isMVCC()}, ORecordSchemaAware#validate()
   */
  @Override
  public <RET extends ORecordInternal<?>> RET save(final ORecordInternal<?> iRecord, String iClusterName,
      final OPERATION_MODE iMode, boolean iForceCreate, final ORecordCallback<? extends Number> iRecordCreatedCallback,
      ORecordCallback<ORecordVersion> iRecordUpdatedCallback) {
    if (!(iRecord instanceof ODocument))
      return (RET) super.save(iRecord, iClusterName, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback);

    ODocument doc = (ODocument) iRecord;

    if (iForceCreate || !doc.getIdentity().isValid()) {
      if (doc.getClassName() != null)
        checkSecurity(ODatabaseSecurityResources.CLASS, ORole.PERMISSION_CREATE, doc.getClassName());

      if (iClusterName == null && doc.getSchemaClass() != null)
        // FIND THE RIGHT CLUSTER AS CONFIGURED IN CLASS
        iClusterName = getClusterNameById(doc.getSchemaClass().getDefaultClusterId());

      int id = getClusterIdByName(iClusterName);
      if (id == -1)
        throw new IllegalArgumentException("Cluster name " + iClusterName + " is not configured");

      final int[] clusterIds;
      if (doc.getSchemaClass() != null) {
        // CHECK IF THE CLUSTER IS PART OF THE CONFIGURED CLUSTERS
        clusterIds = doc.getSchemaClass().getClusterIds();
        int i = 0;
        for (; i < clusterIds.length; ++i)
          if (clusterIds[i] == id)
            break;

        if (i == clusterIds.length)
          throw new IllegalArgumentException("Cluster name " + iClusterName + " is not configured to store the class "
              + doc.getClassName());
      } else
        clusterIds = new int[] { id };
    } else {
      // UPDATE: CHECK ACCESS ON SCHEMA CLASS NAME (IF ANY)
      if (doc.getClassName() != null)
        checkSecurity(ODatabaseSecurityResources.CLASS, ORole.PERMISSION_UPDATE, doc.getClassName());
    }

    doc.validate();
    doc.convertAllMultiValuesToTrackedVersions();

    doc = super.save(doc, iClusterName, iMode, iForceCreate, iRecordCreatedCallback, iRecordUpdatedCallback);
    return (RET) doc;
  }

  /**
   * Deletes a document. Behavior depends by the current running transaction if any. If no transaction is running then the record is
   * deleted immediately. If an Optimistic transaction is running then the record will be deleted at commit time. The current
   * transaction will continue to see the record as deleted, while others not. If a Pessimistic transaction is running, then an
   * exclusive lock is acquired against the record. Current transaction will continue to see the record as deleted, while others
   * cannot access to it since it's locked.
   * <p/>
   * If MVCC is enabled and the version of the document is different by the version stored in the database, then a
   * {@link OConcurrentModificationException} exception is thrown.
   *
   * @param iRecord record to delete; must not be null
   * @return The Database instance itself giving a "fluent interface". Useful to call multiple methods in chain.
   * @see #setMVCC(boolean), {@link #isMVCC()}
   */
  public ODatabaseDocumentTx delete(final ORecordInternal<?> iRecord) {
    if (iRecord == null)
      throw new ODatabaseException("Cannot delete null document");

    // CHECK ACCESS ON SCHEMA CLASS NAME (IF ANY)
    if (iRecord instanceof ODocument && ((ODocument) iRecord).getClassName() != null)
      checkSecurity(ODatabaseSecurityResources.CLASS, ORole.PERMISSION_DELETE, ((ODocument) iRecord).getClassName());

    try {
      underlying.delete(iRecord);
    } catch (Exception e) {
      // OLogManager.exception() wraps and throws an ODatabaseException.
      if (iRecord instanceof ODocument)
        OLogManager.instance().exception("Error on deleting record %s of class '%s'", e, ODatabaseException.class,
            iRecord.getIdentity(), ((ODocument) iRecord).getClassName());
      else
        OLogManager.instance().exception("Error on deleting record %s", e, ODatabaseException.class, iRecord.getIdentity());
    }
    return this;
  }

  /**
   * Returns the number of the records of the class iClassName.
   */
  public long countClass(final String iClassName) {
    final OClass cls = getMetadata().getSchema().getClass(iClassName);

    if (cls == null)
      throw new IllegalArgumentException("Class '" + iClassName + "' not found in database");

    return cls.count();
  }

  /** Commits the running transaction; the transaction is closed in any case. */
  public ODatabaseComplex<ORecordInternal<?>> commit() {
    try {
      return underlying.commit();
    } finally {
      getTransaction().close();
    }
  }

  /** Rolls back the running transaction; the transaction is closed in any case. */
  public ODatabaseComplex<ORecordInternal<?>> rollback() {
    try {
      return underlying.rollback();
    } finally {
      getTransaction().close();
    }
  }

  public String getType() {
    return TYPE;
  }

  @Override
  public OSBTreeCollectionManager getSbTreeCollectionManager() {
    return underlying.getSbTreeCollectionManager();
  }
}
package org.apache.lucene.search;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.document.NumericField; // for javadocs
import org.apache.lucene.analysis.NumericTokenStream; // for javadocs

import java.io.IOException;
import java.io.Serializable;
import java.io.PrintStream;
import java.text.DecimalFormat;

/**
 * Expert: Maintains caches of term values.
 *
 * <p>Created: May 19, 2004  11:13:14 AM
 *
 * @since lucene 1.4
 * @see org.apache.lucene.util.FieldCacheSanityChecker
 */
public interface FieldCache {

  public static final class CreationPlaceholder {
    Object value;
  }

  /** Indicator for StringIndex values in the cache. */
  // NOTE: the value assigned to this constant must not be
  // the same as any of those in SortField!!
  public static final int STRING_INDEX = -1;

  /** Expert: Stores term text values and document ordering data. */
  public static class StringIndex {

    /**
     * Binary-searches {@code lookup} for {@code key}. Returns the index of the key if found,
     * 0 for a null key (slot 0 is reserved for the null term, which is why the search starts
     * at index 1 and {@link java.util.Arrays#binarySearch} cannot be used directly), or
     * {@code -(insertionPoint + 1)} when the key is absent.
     */
    public int binarySearchLookup(String key) {
      // this special case is the reason that Arrays.binarySearch() isn't useful.
      if (key == null)
        return 0;

      int low = 1;
      int high = lookup.length-1;

      while (low <= high) {
        // unsigned shift avoids the (low + high) integer-overflow pitfall
        int mid = (low + high) >>> 1;
        int cmp = lookup[mid].compareTo(key);

        if (cmp < 0)
          low = mid + 1;
        else if (cmp > 0)
          high = mid - 1;
        else
          return mid; // key found
      }
      return -(low + 1);  // key not found.
    }

    /** All the term values, in natural order. */
    public final String[] lookup;

    /** For each document, an index into the lookup array. */
    public final int[] order;

    /** Creates one of these objects */
    public StringIndex (int[] values, String[] lookup) {
      this.order = values;
      this.lookup = lookup;
    }
  }

  /**
   * Marker interface as super-interface to all parsers. It
   * is used to specify a custom parser to {@link
   * SortField#SortField(String, FieldCache.Parser)}.
   */
  public interface Parser extends Serializable {
  }

  /** Interface to parse bytes from document fields.
   * @see FieldCache#getBytes(IndexReader, String, FieldCache.ByteParser)
   */
  public interface ByteParser extends Parser {
    /** Return a single Byte representation of this field's value. */
    public byte parseByte(String string);
  }

  /** Interface to parse shorts from document fields.
   * @see FieldCache#getShorts(IndexReader, String, FieldCache.ShortParser)
   */
  public interface ShortParser extends Parser {
    /** Return a short representation of this field's value. */
    public short parseShort(String string);
  }

  /** Interface to parse ints from document fields.
   * @see FieldCache#getInts(IndexReader, String, FieldCache.IntParser)
   */
  public interface IntParser extends Parser {
    /** Return an integer representation of this field's value. */
    public int parseInt(String string);
  }

  /** Interface to parse floats from document fields.
   * @see FieldCache#getFloats(IndexReader, String, FieldCache.FloatParser)
   */
  public interface FloatParser extends Parser {
    /** Return a float representation of this field's value. */
    public float parseFloat(String string);
  }

  /** Interface to parse long from document fields.
   * @see FieldCache#getLongs(IndexReader, String, FieldCache.LongParser)
   */
  public interface LongParser extends Parser {
    /** Return a long representation of this field's value. */
    public long parseLong(String string);
  }

  /** Interface to parse doubles from document fields.
   * @see FieldCache#getDoubles(IndexReader, String, FieldCache.DoubleParser)
   */
  public interface DoubleParser extends Parser {
    /** Return a double representation of this field's value. */
    public double parseDouble(String string);
  }

  /** Expert: The cache used internally by sorting and range query classes. */
  public static FieldCache DEFAULT = new FieldCacheImpl();

  /** The default parser for byte values, which are encoded by {@link Byte#toString(byte)} */
  public static final ByteParser DEFAULT_BYTE_PARSER = new ByteParser() {
    public byte parseByte(String value) {
      return Byte.parseByte(value);
    }
    // single-instance Serializable pattern: deserialization resolves back to the singleton
    protected Object readResolve() {
      return DEFAULT_BYTE_PARSER;
    }
    @Override
    public String toString() {
      return FieldCache.class.getName()+".DEFAULT_BYTE_PARSER";
    }
  };

  /** The default parser for short values, which are encoded by {@link Short#toString(short)} */
  public static final ShortParser DEFAULT_SHORT_PARSER = new ShortParser() {
    public short parseShort(String value) {
      return Short.parseShort(value);
    }
    protected Object readResolve() {
      return DEFAULT_SHORT_PARSER;
    }
    @Override
    public String toString() {
      return FieldCache.class.getName()+".DEFAULT_SHORT_PARSER";
    }
  };

  /** The default parser for int values, which are encoded by {@link Integer#toString(int)} */
  public static final IntParser DEFAULT_INT_PARSER = new IntParser() {
    public int parseInt(String value) {
      return Integer.parseInt(value);
    }
    protected Object readResolve() {
      return DEFAULT_INT_PARSER;
    }
    @Override
    public String toString() {
      return FieldCache.class.getName()+".DEFAULT_INT_PARSER";
    }
  };

  /** The default parser for float values, which are encoded by {@link Float#toString(float)} */
  public static final FloatParser DEFAULT_FLOAT_PARSER = new FloatParser() {
    public float parseFloat(String value) {
      return Float.parseFloat(value);
    }
    protected Object readResolve() {
      return DEFAULT_FLOAT_PARSER;
    }
    @Override
    public String toString() {
      return FieldCache.class.getName()+".DEFAULT_FLOAT_PARSER";
    }
  };

  /** The default parser for long values, which are encoded by {@link Long#toString(long)} */
  public static final LongParser DEFAULT_LONG_PARSER = new LongParser() {
    public long parseLong(String value) {
      return Long.parseLong(value);
    }
    protected Object readResolve() {
      return DEFAULT_LONG_PARSER;
    }
    @Override
    public String toString() {
      return FieldCache.class.getName()+".DEFAULT_LONG_PARSER";
    }
  };

  /** The default parser for double values, which are encoded by {@link Double#toString(double)} */
  public static final DoubleParser DEFAULT_DOUBLE_PARSER = new DoubleParser() {
    public double parseDouble(String value) {
      return Double.parseDouble(value);
    }
    protected Object readResolve() {
      return DEFAULT_DOUBLE_PARSER;
    }
    @Override
    public String toString() {
      return FieldCache.class.getName()+".DEFAULT_DOUBLE_PARSER";
    }
  };

  /**
   * A parser instance for int values encoded by {@link NumericUtils#intToPrefixCoded(int)}, e.g. when indexed
   * via {@link NumericField}/{@link NumericTokenStream}.
   */
  public static final IntParser NUMERIC_UTILS_INT_PARSER=new IntParser(){
    public int parseInt(String val) {
      // a non-zero shift marks a lower-precision prefix term (see NumericUtils prefix coding);
      // stop filling the cache once those are reached
      final int shift = val.charAt(0)-NumericUtils.SHIFT_START_INT;
      if (shift>0 && shift<=31)
        throw new FieldCacheImpl.StopFillCacheException();
      return NumericUtils.prefixCodedToInt(val);
    }
    protected Object readResolve() {
      return NUMERIC_UTILS_INT_PARSER;
    }
    @Override
    public String toString() {
      return FieldCache.class.getName()+".NUMERIC_UTILS_INT_PARSER";
    }
  };

  /**
   * A parser instance for float values encoded with {@link NumericUtils}, e.g. when indexed
   * via {@link NumericField}/{@link NumericTokenStream}.
   */
  public static final FloatParser NUMERIC_UTILS_FLOAT_PARSER=new FloatParser(){
    public float parseFloat(String val) {
      final int shift = val.charAt(0)-NumericUtils.SHIFT_START_INT;
      if (shift>0 && shift<=31)
        throw new FieldCacheImpl.StopFillCacheException();
      return NumericUtils.sortableIntToFloat(NumericUtils.prefixCodedToInt(val));
    }
    protected Object readResolve() {
      return NUMERIC_UTILS_FLOAT_PARSER;
    }
    @Override
    public String toString() {
      return FieldCache.class.getName()+".NUMERIC_UTILS_FLOAT_PARSER";
    }
  };

  /**
   * A parser instance for long values encoded by {@link NumericUtils#longToPrefixCoded(long)}, e.g. when indexed
   * via {@link NumericField}/{@link NumericTokenStream}.
   */
  public static final LongParser NUMERIC_UTILS_LONG_PARSER = new LongParser(){
    public long parseLong(String val) {
      final int shift = val.charAt(0)-NumericUtils.SHIFT_START_LONG;
      if (shift>0 && shift<=63)
        throw new FieldCacheImpl.StopFillCacheException();
      return NumericUtils.prefixCodedToLong(val);
    }
    protected Object readResolve() {
      return NUMERIC_UTILS_LONG_PARSER;
    }
    @Override
    public String toString() {
      return FieldCache.class.getName()+".NUMERIC_UTILS_LONG_PARSER";
    }
  };

  /**
   * A parser instance for double values encoded with {@link NumericUtils}, e.g. when indexed
   * via {@link NumericField}/{@link NumericTokenStream}.
   */
  public static final DoubleParser NUMERIC_UTILS_DOUBLE_PARSER = new DoubleParser(){
    public double parseDouble(String val) {
      final int shift = val.charAt(0)-NumericUtils.SHIFT_START_LONG;
      if (shift>0 && shift<=63)
        throw new FieldCacheImpl.StopFillCacheException();
      return NumericUtils.sortableLongToDouble(NumericUtils.prefixCodedToLong(val));
    }
    protected Object readResolve() {
      return NUMERIC_UTILS_DOUBLE_PARSER;
    }
    @Override
    public String toString() {
      return FieldCache.class.getName()+".NUMERIC_UTILS_DOUBLE_PARSER";
    }
  };

  /** Checks the internal cache for an appropriate entry, and if none is found,
   * reads the terms in <code>field</code> and returns a bit set at the size of
   * <code>reader.maxDoc()</code>, with turned on bits for each docid that
   * does have a value for this field.
   */
  public Bits getDocsWithField(IndexReader reader, String field)
  throws IOException;

  /** Checks the internal cache for an appropriate entry, and if none is
   * found, reads the terms in <code>field</code> as a single byte and returns an array
   * of size <code>reader.maxDoc()</code> of the value each document
   * has in the given field.
   * @param reader  Used to get field values.
   * @param field   Which field contains the single byte values.
   * @return The values in the given field for each document.
   * @throws IOException  If any error occurs.
   */
  public byte[] getBytes (IndexReader reader, String field)
  throws IOException;

  /** Checks the internal cache for an appropriate entry, and if none is found,
   * reads the terms in <code>field</code> as bytes and returns an array of
   * size <code>reader.maxDoc()</code> of the value each document has in the
   * given field.
   * @param reader  Used to get field values.
   * @param field   Which field contains the bytes.
   * @param parser  Computes byte for string values.
   * @return The values in the given field for each document.
   * @throws IOException  If any error occurs.
   */
  public byte[] getBytes (IndexReader reader, String field, ByteParser parser)
  throws IOException;

  /** Checks the internal cache for an appropriate entry, and if none is found,
   * reads the terms in <code>field</code> as bytes and returns an array of
   * size <code>reader.maxDoc()</code> of the value each document has in the
   * given field.
   * @param reader  Used to get field values.
   * @param field   Which field contains the bytes.
   * @param parser  Computes byte for string values.
   * @param setDocsWithField  If true then {@link #getDocsWithField} will
   *        also be computed and stored in the FieldCache.
   * @return The values in the given field for each document.
   * @throws IOException  If any error occurs.
   */
  public byte[] getBytes (IndexReader reader, String field, ByteParser parser, boolean setDocsWithField)
  throws IOException;

  /** Checks the internal cache for an appropriate entry, and if none is
   * found, reads the terms in <code>field</code> as shorts and returns an array
   * of size <code>reader.maxDoc()</code> of the value each document
   * has in the given field.
   * @param reader  Used to get field values.
   * @param field   Which field contains the shorts.
   * @return The values in the given field for each document.
   * @throws IOException  If any error occurs.
   */
  public short[] getShorts (IndexReader reader, String field)
  throws IOException;

  /** Checks the internal cache for an appropriate entry, and if none is found,
   * reads the terms in <code>field</code> as shorts and returns an array of
   * size <code>reader.maxDoc()</code> of the value each document has in the
   * given field.
   * @param reader  Used to get field values.
   * @param field   Which field contains the shorts.
   * @param parser  Computes short for string values.
   * @return The values in the given field for each document.
   * @throws IOException  If any error occurs.
   */
  public short[] getShorts (IndexReader reader, String field, ShortParser parser)
  throws IOException;

  /** Checks the internal cache for an appropriate entry, and if none is found,
   * reads the terms in <code>field</code> as shorts and returns an array of
   * size <code>reader.maxDoc()</code> of the value each document has in the
   * given field.
   * @param reader  Used to get field values.
   * @param field   Which field contains the shorts.
   * @param parser  Computes short for string values.
   * @param setDocsWithField  If true then {@link #getDocsWithField} will
   *        also be computed and stored in the FieldCache.
   * @return The values in the given field for each document.
   * @throws IOException  If any error occurs.
   */
  public short[] getShorts (IndexReader reader, String field, ShortParser parser, boolean setDocsWithField)
  throws IOException;

  /** Checks the internal cache for an appropriate entry, and if none is
   * found, reads the terms in <code>field</code> as integers and returns an array
   * of size <code>reader.maxDoc()</code> of the value each document
   * has in the given field.
   * @param reader  Used to get field values.
   * @param field   Which field contains the integers.
   * @return The values in the given field for each document.
   * @throws IOException  If any error occurs.
   */
  public int[] getInts (IndexReader reader, String field)
  throws IOException;

  /** Checks the internal cache for an appropriate entry, and if none is found,
   * reads the terms in <code>field</code> as integers and returns an array of
   * size <code>reader.maxDoc()</code> of the value each document has in the
   * given field.
   * @param reader  Used to get field values.
   * @param field   Which field contains the integers.
   * @param parser  Computes integer for string values.
   * @return The values in the given field for each document.
   * @throws IOException  If any error occurs.
   */
  public int[] getInts (IndexReader reader, String field, IntParser parser)
  throws IOException;

  /** Checks the internal cache for an appropriate entry, and if none is found,
   * reads the terms in <code>field</code> as integers and returns an array of
   * size <code>reader.maxDoc()</code> of the value each document has in the
   * given field.
   * @param reader  Used to get field values.
   * @param field   Which field contains the integers.
   * @param parser  Computes integer for string values.
   * @param setDocsWithField  If true then {@link #getDocsWithField} will
   *        also be computed and stored in the FieldCache.
   * @return The values in the given field for each document.
   * @throws IOException  If any error occurs.
   */
  public int[] getInts (IndexReader reader, String field, IntParser parser, boolean setDocsWithField)
  throws IOException;

  /** Checks the internal cache for an appropriate entry, and if
   * none is found, reads the terms in <code>field</code> as floats and returns an array
   * of size <code>reader.maxDoc()</code> of the value each document
   * has in the given field.
   * @param reader  Used to get field values.
   * @param field   Which field contains the floats.
   * @return The values in the given field for each document.
   * @throws IOException  If any error occurs.
   */
  public float[] getFloats (IndexReader reader, String field)
  throws IOException;

  /** Checks the internal cache for an appropriate entry, and if
   * none is found, reads the terms in <code>field</code> as floats and returns an array
   * of size <code>reader.maxDoc()</code> of the value each document
   * has in the given field.
   * @param reader  Used to get field values.
   * @param field   Which field contains the floats.
   * @param parser  Computes float for string values.
   * @return The values in the given field for each document.
   * @throws IOException  If any error occurs.
   */
  public float[] getFloats (IndexReader reader, String field, FloatParser parser)
  throws IOException;

  /** Checks the internal cache for an appropriate entry, and if
   * none is found, reads the terms in <code>field</code> as floats and returns an array
   * of size <code>reader.maxDoc()</code> of the value each document
   * has in the given field.
   * @param reader  Used to get field values.
   * @param field   Which field contains the floats.
   * @param parser  Computes float for string values.
   * @param setDocsWithField  If true then {@link #getDocsWithField} will
   *        also be computed and stored in the FieldCache.
   * @return The values in the given field for each document.
   * @throws IOException  If any error occurs.
   */
  public float[] getFloats (IndexReader reader, String field, FloatParser parser, boolean setDocsWithField)
  throws IOException;

  /**
   * Checks the internal cache for an appropriate entry, and if none is
   * found, reads the terms in <code>field</code> as longs and returns an array
   * of size <code>reader.maxDoc()</code> of the value each document
   * has in the given field.
   *
   * @param reader Used to get field values.
   * @param field  Which field contains the longs.
   * @return The values in the given field for each document.
   * @throws java.io.IOException If any error occurs.
   */
  public long[] getLongs(IndexReader reader, String field)
  throws IOException;

  /**
   * Checks the internal cache for an appropriate entry, and if none is found,
   * reads the terms in <code>field</code> as longs and returns an array of
   * size <code>reader.maxDoc()</code> of the value each document has in the
   * given field.
   *
   * @param reader Used to get field values.
   * @param field  Which field contains the longs.
   * @param parser Computes long for string values.
   * @return The values in the given field for each document.
   * @throws IOException If any error occurs.
   */
  public long[] getLongs(IndexReader reader, String field, LongParser parser)
  throws IOException;

  /**
   * Checks the internal cache for an appropriate entry, and if none is found,
   * reads the terms in <code>field</code> as longs and returns an array of
   * size <code>reader.maxDoc()</code> of the value each document has in the
   * given field.
   *
   * @param reader Used to get field values.
   * @param field  Which field contains the longs.
   * @param parser Computes long for string values.
   * @param setDocsWithField  If true then {@link #getDocsWithField} will
   *        also be computed and stored in the FieldCache.
   * @return The values in the given field for each document.
   * @throws IOException If any error occurs.
   */
  public long[] getLongs(IndexReader reader, String field, LongParser parser, boolean setDocsWithField)
  throws IOException;

  /**
   * Checks the internal cache for an appropriate entry, and if none is
   * found, reads the terms in <code>field</code> as doubles and returns an array
   * of size <code>reader.maxDoc()</code> of the value each document
   * has in the given field.
   *
   * @param reader Used to get field values.
   * @param field  Which field contains the doubles.
   * @return The values in the given field for each document.
   * @throws IOException If any error occurs.
   */
  public double[] getDoubles(IndexReader reader, String field)
  throws IOException;

  /**
   * Checks the internal cache for an appropriate entry, and if none is found,
   * reads the terms in <code>field</code> as doubles and returns an array of
   * size <code>reader.maxDoc()</code> of the value each document has in the
   * given field.
   *
   * @param reader Used to get field values.
   * @param field  Which field contains the doubles.
   * @param parser Computes double for string values.
   * @return The values in the given field for each document.
   * @throws IOException If any error occurs.
   */
  public double[] getDoubles(IndexReader reader, String field, DoubleParser parser)
  throws IOException;

  /**
   * Checks the internal cache for an appropriate entry, and if none is found,
   * reads the terms in <code>field</code> as doubles and returns an array of
   * size <code>reader.maxDoc()</code> of the value each document has in the
   * given field.
   *
   * @param reader Used to get field values.
   * @param field  Which field contains the doubles.
   * @param parser Computes double for string values.
   * @param setDocsWithField  If true then {@link #getDocsWithField} will
   *        also be computed and stored in the FieldCache.
   * @return The values in the given field for each document.
   * @throws IOException If any error occurs.
   */
  public double[] getDoubles(IndexReader reader, String field, DoubleParser parser, boolean setDocsWithField)
  throws IOException;

  /** Checks the internal cache for an appropriate entry, and if none
   * is found, reads the term values in <code>field</code> and returns an array
   * of size <code>reader.maxDoc()</code> containing the value each document
   * has in the given field.
   * @param reader  Used to get field values.
   * @param field   Which field contains the strings.
   * @return The values in the given field for each document.
   * @throws IOException  If any error occurs.
   */
  public String[] getStrings (IndexReader reader, String field)
  throws IOException;

  /** Checks the internal cache for an appropriate entry, and if none
   * is found reads the term values in <code>field</code> and returns
   * an array of them in natural order, along with an array telling
   * which element in the term array each document uses.
   * @param reader  Used to get field values.
   * @param field   Which field contains the strings.
   * @return Array of terms and index into the array for each document.
   * @throws IOException  If any error occurs.
   */
  public StringIndex getStringIndex (IndexReader reader, String field)
  throws IOException;

  /**
   * EXPERT: A unique Identifier/Description for each item in the FieldCache.
   * Can be useful for logging/debugging.
   * @lucene.experimental
   */
  public static abstract class CacheEntry {
    public abstract Object getReaderKey();
    public abstract String getFieldName();
    public abstract Class<?> getCacheType();
    public abstract Object getCustom();
    public abstract Object getValue();

    // human-readable size estimate; null until estimateSize() has been called
    private String size = null;

    protected final void setEstimatedSize(String size) {
      this.size = size;
    }

    /**
     * Computes (and stores) the estimated size of the cache Value
     * @see #getEstimatedSize
     */
    public void estimateSize() {
      long size = RamUsageEstimator.sizeOf(getValue());
      setEstimatedSize(RamUsageEstimator.humanReadableUnits(size));
    }

    /**
     * The most recently estimated size of the value, null unless
     * estimateSize has been called.
     */
    public final String getEstimatedSize() {
      return size;
    }

    @Override
    public String toString() {
      StringBuilder b = new StringBuilder();
      b.append("'").append(getReaderKey()).append("'=>");
      b.append("'").append(getFieldName()).append("',");
      b.append(getCacheType()).append(",").append(getCustom());
      b.append("=>").append(getValue().getClass().getName()).append("#");
      b.append(System.identityHashCode(getValue()));

      String s = getEstimatedSize();
      if(null != s) {
        b.append(" (size =~ ").append(s).append(')');
      }

      return b.toString();
    }
  }

  /**
   * EXPERT: Generates an array of CacheEntry objects representing all items
   * currently in the FieldCache.
   * <p>
   * NOTE: These CacheEntry objects maintain a strong reference to the
   * Cached Values.  Maintaining references to a CacheEntry after the
   * IndexReader associated with it has been garbage collected will prevent
   * the Value itself from being garbage collected when the Cache drops the
   * WeakReference.
   * </p>
   * @lucene.experimental
   */
  public abstract CacheEntry[] getCacheEntries();

  /**
   * <p>
   * EXPERT: Instructs the FieldCache to forcibly expunge all entries
   * from the underlying caches.  This is intended only to be used for
   * test methods as a way to ensure a known base state of the Cache
   * (with out needing to rely on GC to free WeakReferences).
   * It should not be relied on for "Cache maintenance" in general
   * application code.
   * </p>
   * @lucene.experimental
   */
  public abstract void purgeAllCaches();

  /**
   * Expert: drops all cache entries associated with this
   * reader.  NOTE: this reader must precisely match the
   * reader that the cache entry is keyed on. If you pass a
   * top-level reader, it usually will have no effect as
   * Lucene now caches at the segment reader level.
   */
  public abstract void purge(IndexReader r);

  /**
   * If non-null, FieldCacheImpl will warn whenever
   * entries are created that are not sane according to
   * {@link org.apache.lucene.util.FieldCacheSanityChecker}.
   */
  public void setInfoStream(PrintStream stream);

  /** counterpart of {@link #setInfoStream(PrintStream)} */
  public PrintStream getInfoStream();
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.people.v1.model;

/**
 * A person's physical address. May be a P.O. box or street address. All fields are optional.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the People API. For a detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class Address extends com.google.api.client.json.GenericJson {

  /**
   * The city of the address.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String city;

  /**
   * The country of the address.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String country;

  /**
   * The [ISO 3166-1 alpha-2](http://www.iso.org/iso/country_codes.htm) country code of the address.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String countryCode;

  /**
   * The extended address of the address; for example, the apartment number.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String extendedAddress;

  /**
   * Output only. The type of the address translated and formatted in the viewer's account locale or
   * the `Accept-Language` HTTP header locale.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String formattedType;

  /**
   * The unstructured value of the address. If this is not set by the user it will be automatically
   * constructed from structured values.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String formattedValue;

  /**
   * Metadata about the address.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private FieldMetadata metadata;

  /**
   * The P.O. box of the address.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String poBox;

  /**
   * The postal code of the address.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String postalCode;

  /**
   * The region of the address; for example, the state or province.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String region;

  /**
   * The street address.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String streetAddress;

  /**
   * The type of the address. The type can be custom or one of these predefined values:  * * `home`
   * * `work` * `other`
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String type;

  /**
   * The city of the address.
   * @return value or {@code null} for none
   */
  public java.lang.String getCity() {
    return city;
  }

  /**
   * The city of the address.
   * @param city city or {@code null} for none
   */
  public Address setCity(java.lang.String city) {
    this.city = city;
    return this;
  }

  /**
   * The country of the address.
   * @return value or {@code null} for none
   */
  public java.lang.String getCountry() {
    return country;
  }

  /**
   * The country of the address.
   * @param country country or {@code null} for none
   */
  public Address setCountry(java.lang.String country) {
    this.country = country;
    return this;
  }

  /**
   * The [ISO 3166-1 alpha-2](http://www.iso.org/iso/country_codes.htm) country code of the address.
   * @return value or {@code null} for none
   */
  public java.lang.String getCountryCode() {
    return countryCode;
  }

  /**
   * The [ISO 3166-1 alpha-2](http://www.iso.org/iso/country_codes.htm) country code of the address.
   * @param countryCode countryCode or {@code null} for none
   */
  public Address setCountryCode(java.lang.String countryCode) {
    this.countryCode = countryCode;
    return this;
  }

  /**
   * The extended address of the address; for example, the apartment number.
   * @return value or {@code null} for none
   */
  public java.lang.String getExtendedAddress() {
    return extendedAddress;
  }

  /**
   * The extended address of the address; for example, the apartment number.
   * @param extendedAddress extendedAddress or {@code null} for none
   */
  public Address setExtendedAddress(java.lang.String extendedAddress) {
    this.extendedAddress = extendedAddress;
    return this;
  }

  /**
   * Output only. The type of the address translated and formatted in the viewer's account locale or
   * the `Accept-Language` HTTP header locale.
   * @return value or {@code null} for none
   */
  public java.lang.String getFormattedType() {
    return formattedType;
  }

  /**
   * Output only. The type of the address translated and formatted in the viewer's account locale or
   * the `Accept-Language` HTTP header locale.
   * @param formattedType formattedType or {@code null} for none
   */
  public Address setFormattedType(java.lang.String formattedType) {
    this.formattedType = formattedType;
    return this;
  }

  /**
   * The unstructured value of the address. If this is not set by the user it will be automatically
   * constructed from structured values.
   * @return value or {@code null} for none
   */
  public java.lang.String getFormattedValue() {
    return formattedValue;
  }

  /**
   * The unstructured value of the address. If this is not set by the user it will be automatically
   * constructed from structured values.
   * @param formattedValue formattedValue or {@code null} for none
   */
  public Address setFormattedValue(java.lang.String formattedValue) {
    this.formattedValue = formattedValue;
    return this;
  }

  /**
   * Metadata about the address.
   * @return value or {@code null} for none
   */
  public FieldMetadata getMetadata() {
    return metadata;
  }

  /**
   * Metadata about the address.
   * @param metadata metadata or {@code null} for none
   */
  public Address setMetadata(FieldMetadata metadata) {
    this.metadata = metadata;
    return this;
  }

  /**
   * The P.O. box of the address.
   * @return value or {@code null} for none
   */
  public java.lang.String getPoBox() {
    return poBox;
  }

  /**
   * The P.O. box of the address.
   * @param poBox poBox or {@code null} for none
   */
  public Address setPoBox(java.lang.String poBox) {
    this.poBox = poBox;
    return this;
  }

  /**
   * The postal code of the address.
   * @return value or {@code null} for none
   */
  public java.lang.String getPostalCode() {
    return postalCode;
  }

  /**
   * The postal code of the address.
   * @param postalCode postalCode or {@code null} for none
   */
  public Address setPostalCode(java.lang.String postalCode) {
    this.postalCode = postalCode;
    return this;
  }

  /**
   * The region of the address; for example, the state or province.
   * @return value or {@code null} for none
   */
  public java.lang.String getRegion() {
    return region;
  }

  /**
   * The region of the address; for example, the state or province.
   * @param region region or {@code null} for none
   */
  public Address setRegion(java.lang.String region) {
    this.region = region;
    return this;
  }

  /**
   * The street address.
   * @return value or {@code null} for none
   */
  public java.lang.String getStreetAddress() {
    return streetAddress;
  }

  /**
   * The street address.
   * @param streetAddress streetAddress or {@code null} for none
   */
  public Address setStreetAddress(java.lang.String streetAddress) {
    this.streetAddress = streetAddress;
    return this;
  }

  /**
   * The type of the address. The type can be custom or one of these predefined values:  * * `home`
   * * `work` * `other`
   * @return value or {@code null} for none
   */
  public java.lang.String getType() {
    return type;
  }

  /**
   * The type of the address. The type can be custom or one of these predefined values:  * * `home`
   * * `work` * `other`
   * @param type type or {@code null} for none
   */
  public Address setType(java.lang.String type) {
    this.type = type;
    return this;
  }

  @Override
  public Address set(String fieldName, Object value) {
    return (Address) super.set(fieldName, value);
  }

  @Override
  public Address clone() {
    return (Address) super.clone();
  }
}
package org.apache.lucene.util.packed; /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.apache.lucene.store.IndexInput; import org.apache.lucene.util.RamUsageEstimator; import java.io.IOException; import java.util.Arrays; /** * Space optimized random access capable array of values with a fixed number of * bits. The maximum number of bits/value is 31. Use {@link Packed64} for higher * numbers. * </p><p> * The implementation strives to avoid conditionals and expensive operations, * sacrificing code clarity to achieve better performance. */ class Packed32 extends PackedInts.ReaderImpl implements PackedInts.Mutable { static final int BLOCK_SIZE = 32; // 32 = int, 64 = long static final int BLOCK_BITS = 5; // The #bits representing BLOCK_SIZE static final int MOD_MASK = BLOCK_SIZE - 1; // x % BLOCK_SIZE private static final int ENTRY_SIZE = BLOCK_SIZE + 1; private static final int FAC_BITPOS = 3; /* * In order to make an efficient value-getter, conditionals should be * avoided. A value can be positioned inside of a block, requiring shifting * left or right or it can span two blocks, requiring a left-shift on the * first block and a right-shift on the right block. 
* </p><p> * By always shifting the first block both left and right, we get exactly * the right bits. By always shifting the second block right and applying * a mask, we get the right bits there. After that, we | the two bitsets. */ private static final int[][] SHIFTS = new int[ENTRY_SIZE][ENTRY_SIZE * FAC_BITPOS]; private static final int[][] MASKS = new int[ENTRY_SIZE][ENTRY_SIZE]; static { // Generate shifts for (int elementBits = 1 ; elementBits <= BLOCK_SIZE ; elementBits++) { for (int bitPos = 0 ; bitPos < BLOCK_SIZE ; bitPos++) { int[] currentShifts = SHIFTS[elementBits]; int base = bitPos * FAC_BITPOS; currentShifts[base ] = bitPos; currentShifts[base + 1] = BLOCK_SIZE - elementBits; if (bitPos <= BLOCK_SIZE - elementBits) { // Single block currentShifts[base + 2] = 0; MASKS[elementBits][bitPos] = 0; } else { // Two blocks int rBits = elementBits - (BLOCK_SIZE - bitPos); currentShifts[base + 2] = BLOCK_SIZE - rBits; MASKS[elementBits][bitPos] = ~(~0 << rBits); } } } } /* * The setter requires more masking than the getter. */ private static final int[][] WRITE_MASKS = new int[ENTRY_SIZE][ENTRY_SIZE * FAC_BITPOS]; static { for (int elementBits = 1 ; elementBits <= BLOCK_SIZE ; elementBits++) { int elementPosMask = ~(~0 << elementBits); int[] currentShifts = SHIFTS[elementBits]; int[] currentMasks = WRITE_MASKS[elementBits]; for (int bitPos = 0 ; bitPos < BLOCK_SIZE ; bitPos++) { int base = bitPos * FAC_BITPOS; currentMasks[base ] =~((elementPosMask << currentShifts[base + 1]) >>> currentShifts[base]); currentMasks[base+1] = ~(elementPosMask << currentShifts[base + 2]); currentMasks[base+2] = currentShifts[base + 2] == 0 ? 
0 : ~0; if (bitPos <= BLOCK_SIZE - elementBits) { // Second block not used currentMasks[base+1] = ~0; // Keep all bits currentMasks[base+2] = 0; // Or with 0 } } } } /* The bits */ private int[] blocks; // Cached calculations private int maxPos; // blocks.length * BLOCK_SIZE / bitsPerValue - 1 private int[] shifts; // The shifts for the current bitsPerValue private int[] readMasks; private int[] writeMasks; /** * Creates an array with the internal structures adjusted for the given * limits and initialized to 0. * @param valueCount the number of elements. * @param bitsPerValue the number of bits available for any given value. * Note: bitsPerValue >32 is not supported by this implementation. */ public Packed32(int valueCount, int bitsPerValue) { this(new int[(int)(((long)valueCount) * bitsPerValue / BLOCK_SIZE + 2)], valueCount, bitsPerValue); } /** * Creates an array with content retrieved from the given IndexInput. * @param in an IndexInput, positioned at the start of Packed64-content. * @param valueCount the number of elements. * @param bitsPerValue the number of bits available for any given value. * @throws java.io.IOException if the values for the backing array could not * be retrieved. */ public Packed32(IndexInput in, int valueCount, int bitsPerValue) throws IOException { super(valueCount, bitsPerValue); int size = size(bitsPerValue, valueCount); blocks = new int[size + 1]; // +1 due to non-conditional tricks for(int i = 0 ; i < size ; i++) { blocks[i] = in.readInt(); } if (size % 2 == 1) { in.readInt(); // Align to long } updateCached(); } private static int size(int bitsPerValue, int valueCount) { final long totBitCount = (long) valueCount * bitsPerValue; return (int) (totBitCount/32 + ((totBitCount % 32 == 0 ) ? 0:1)); } /** * Creates an array backed by the given blocks. * </p><p> * Note: The blocks are used directly, so changes to the given block will * affect the Packed32-structure. * @param blocks used as the internal backing array. 
* @param valueCount the number of values. * @param bitsPerValue the number of bits available for any given value. * Note: bitsPerValue >32 is not supported by this implementation. */ public Packed32(int[] blocks, int valueCount, int bitsPerValue) { // TODO: Check that blocks.length is sufficient for holding length values super(valueCount, bitsPerValue); if (bitsPerValue > 31) { throw new IllegalArgumentException(String.format( "This array only supports values of 31 bits or less. The " + "required number of bits was %d. The Packed64 " + "implementation allows values with more than 31 bits", bitsPerValue)); } this.blocks = blocks; updateCached(); } private void updateCached() { readMasks = MASKS[bitsPerValue]; maxPos = (int)((((long)blocks.length) * BLOCK_SIZE / bitsPerValue) - 2); shifts = SHIFTS[bitsPerValue]; writeMasks = WRITE_MASKS[bitsPerValue]; } /** * @param index the position of the value. * @return the value at the given index. */ public long get(final int index) { final long majorBitPos = index * bitsPerValue; final int elementPos = (int)(majorBitPos >>> BLOCK_BITS); // / BLOCK_SIZE final int bitPos = (int)(majorBitPos & MOD_MASK); // % BLOCK_SIZE); final int base = bitPos * FAC_BITPOS; return ((blocks[elementPos] << shifts[base]) >>> shifts[base+1]) | ((blocks[elementPos+1] >>> shifts[base+2]) & readMasks[bitPos]); } public void set(final int index, final long value) { final int intValue = (int)value; final long majorBitPos = index * bitsPerValue; final int elementPos = (int)(majorBitPos >>> BLOCK_BITS); // / BLOCK_SIZE final int bitPos = (int)(majorBitPos & MOD_MASK); // % BLOCK_SIZE); final int base = bitPos * FAC_BITPOS; blocks[elementPos ] = (blocks[elementPos ] & writeMasks[base]) | (intValue << shifts[base + 1] >>> shifts[base]); blocks[elementPos+1] = (blocks[elementPos+1] & writeMasks[base+1]) | ((intValue << shifts[base + 2]) & writeMasks[base+2]); } public void clear() { Arrays.fill(blocks, 0); } public String toString() { return 
"Packed32(bitsPerValue=" + bitsPerValue + ", maxPos=" + maxPos + ", elements.length=" + blocks.length + ")"; } public long ramBytesUsed() { return RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + blocks.length * RamUsageEstimator.NUM_BYTES_INT; } }
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. package com.azure.data.cosmos.rx; import com.azure.data.cosmos.BridgeInternal; import com.azure.data.cosmos.CosmosBridgeInternal; import com.azure.data.cosmos.CosmosClient; import com.azure.data.cosmos.CosmosClientBuilder; import com.azure.data.cosmos.CosmosClientException; import com.azure.data.cosmos.CosmosContainer; import com.azure.data.cosmos.CosmosDatabase; import com.azure.data.cosmos.CosmosItemProperties; import com.azure.data.cosmos.FeedOptions; import com.azure.data.cosmos.FeedResponse; import com.azure.data.cosmos.internal.*; import com.azure.data.cosmos.Resource; import com.azure.data.cosmos.internal.Utils.ValueHolder; import com.azure.data.cosmos.internal.query.CompositeContinuationToken; import com.azure.data.cosmos.internal.routing.Range; import io.reactivex.subscribers.TestSubscriber; import org.testng.annotations.DataProvider; import org.testng.annotations.Factory; import org.testng.annotations.Test; import org.testng.annotations.Ignore; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import reactor.core.publisher.Flux; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import static com.azure.data.cosmos.CommonsBridgeInternal.partitionKeyRangeIdInternal; import static org.assertj.core.api.Assertions.assertThat; public class ParallelDocumentQueryTest extends TestSuiteBase { private CosmosDatabase createdDatabase; private CosmosContainer createdCollection; private List<CosmosItemProperties> createdDocuments; private CosmosClient client; public String getCollectionLink() { return TestUtils.getCollectionNameLink(createdDatabase.id(), createdCollection.id()); } @Factory(dataProvider = "clientBuildersWithDirect") public ParallelDocumentQueryTest(CosmosClientBuilder clientBuilder) { super(clientBuilder); } 
@DataProvider(name = "queryMetricsArgProvider") public Object[][] queryMetricsArgProvider() { return new Object[][]{ {true}, {false}, }; } @Test(groups = { "simple" }, timeOut = TIMEOUT, dataProvider = "queryMetricsArgProvider") public void queryDocuments(boolean qmEnabled) { String query = "SELECT * from c where c.prop = 99"; FeedOptions options = new FeedOptions(); options.maxItemCount(5); options.enableCrossPartitionQuery(true); options.populateQueryMetrics(qmEnabled); options.maxDegreeOfParallelism(2); Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query, options); List<CosmosItemProperties> expectedDocs = createdDocuments.stream().filter(d -> 99 == d.getInt("prop") ).collect(Collectors.toList()); assertThat(expectedDocs).isNotEmpty(); FeedResponseListValidator<CosmosItemProperties> validator = new FeedResponseListValidator.Builder<CosmosItemProperties>() .totalSize(expectedDocs.size()) .exactlyContainsInAnyOrder(expectedDocs.stream().map(d -> d.resourceId()).collect(Collectors.toList())) .allPagesSatisfy(new FeedResponseValidator.Builder<CosmosItemProperties>() .requestChargeGreaterThanOrEqualTo(1.0).build()) .hasValidQueryMetrics(qmEnabled) .build(); validateQuerySuccess(queryObservable, validator, TIMEOUT); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void queryMetricEquality() throws Exception { String query = "SELECT * from c where c.prop = 99"; FeedOptions options = new FeedOptions(); options.maxItemCount(5); options.enableCrossPartitionQuery(true); options.populateQueryMetrics(true); options.maxDegreeOfParallelism(0); Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query, options); List<FeedResponse<CosmosItemProperties>> resultList1 = queryObservable.collectList().block(); options.maxDegreeOfParallelism(4); Flux<FeedResponse<CosmosItemProperties>> threadedQueryObs = createdCollection.queryItems(query, options); List<FeedResponse<CosmosItemProperties>> 
resultList2 = threadedQueryObs.collectList().block(); assertThat(resultList1.size()).isEqualTo(resultList2.size()); for(int i = 0; i < resultList1.size(); i++){ compareQueryMetrics(BridgeInternal.queryMetricsFromFeedResponse(resultList1.get(i)), BridgeInternal.queryMetricsFromFeedResponse(resultList2.get(i))); } } private void compareQueryMetrics(Map<String, QueryMetrics> qm1, Map<String, QueryMetrics> qm2) { assertThat(qm1.keySet().size()).isEqualTo(qm2.keySet().size()); QueryMetrics queryMetrics1 = BridgeInternal.createQueryMetricsFromCollection(qm1.values()); QueryMetrics queryMetrics2 = BridgeInternal.createQueryMetricsFromCollection(qm2.values()); assertThat(queryMetrics1.getRetrievedDocumentSize()).isEqualTo(queryMetrics2.getRetrievedDocumentSize()); assertThat(queryMetrics1.getRetrievedDocumentCount()).isEqualTo(queryMetrics2.getRetrievedDocumentCount()); assertThat(queryMetrics1.getIndexHitDocumentCount()).isEqualTo(queryMetrics2.getIndexHitDocumentCount()); assertThat(queryMetrics1.getOutputDocumentCount()).isEqualTo(queryMetrics2.getOutputDocumentCount()); assertThat(queryMetrics1.getOutputDocumentSize()).isEqualTo(queryMetrics2.getOutputDocumentSize()); assertThat(BridgeInternal.getClientSideMetrics(queryMetrics1).getRequestCharge()) .isEqualTo(BridgeInternal.getClientSideMetrics(queryMetrics1).getRequestCharge()); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void queryDocuments_NoResults() { String query = "SELECT * from root r where r.id = '2'"; FeedOptions options = new FeedOptions(); options.enableCrossPartitionQuery(true); Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query, options); FeedResponseListValidator<CosmosItemProperties> validator = new FeedResponseListValidator.Builder<CosmosItemProperties>() .containsExactly(new ArrayList<>()) .numberOfPagesIsGreaterThanOrEqualTo(1) .allPagesSatisfy(new FeedResponseValidator.Builder<CosmosItemProperties>() .pageSizeIsLessThanOrEqualTo(0) 
.requestChargeGreaterThanOrEqualTo(1.0).build()) .build(); validateQuerySuccess(queryObservable, validator); } @Test(groups = { "simple" }, timeOut = 2 * TIMEOUT) public void queryDocumentsWithPageSize() { String query = "SELECT * from root"; FeedOptions options = new FeedOptions(); int pageSize = 3; options.maxItemCount(pageSize); options.maxDegreeOfParallelism(-1); options.enableCrossPartitionQuery(true); Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query, options); List<CosmosItemProperties> expectedDocs = createdDocuments; assertThat(expectedDocs).isNotEmpty(); FeedResponseListValidator<CosmosItemProperties> validator = new FeedResponseListValidator .Builder<CosmosItemProperties>() .exactlyContainsInAnyOrder(expectedDocs .stream() .map(d -> d.resourceId()) .collect(Collectors.toList())) .numberOfPagesIsGreaterThanOrEqualTo((expectedDocs.size() + 1) / 3) .allPagesSatisfy(new FeedResponseValidator.Builder<CosmosItemProperties>() .requestChargeGreaterThanOrEqualTo(1.0) .pageSizeIsLessThanOrEqualTo(pageSize) .build()) .build(); validateQuerySuccess(queryObservable, validator, 2 * subscriberValidationTimeout); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void invalidQuerySyntax() { String query = "I am an invalid query"; FeedOptions options = new FeedOptions(); options.enableCrossPartitionQuery(true); Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query, options); FailureValidator validator = new FailureValidator.Builder() .instanceOf(CosmosClientException.class) .statusCode(400) .notNullActivityId() .build(); validateQueryFailure(queryObservable, validator); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void crossPartitionQueryNotEnabled() { String query = "SELECT * from root"; FeedOptions options = new FeedOptions(); Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query, options); List<CosmosItemProperties> expectedDocs 
= createdDocuments; FeedResponseListValidator<CosmosItemProperties> validator = new FeedResponseListValidator.Builder<CosmosItemProperties>() .totalSize(expectedDocs.size()) .exactlyContainsInAnyOrder(expectedDocs.stream().map(d -> d.resourceId()).collect(Collectors.toList())) .allPagesSatisfy(new FeedResponseValidator.Builder<CosmosItemProperties>() .requestChargeGreaterThanOrEqualTo(1.0) .build()) .build(); validateQuerySuccess(queryObservable, validator); } @Test(groups = { "simple" }, timeOut = 2 * TIMEOUT) public void partitionKeyRangeId() { int sum = 0; for (String partitionKeyRangeId : CosmosBridgeInternal.getAsyncDocumentClient(client).readPartitionKeyRanges(getCollectionLink(), null) .flatMap(p -> Flux.fromIterable(p.results())) .map(Resource::id).collectList().single().block()) { String query = "SELECT * from root"; FeedOptions options = new FeedOptions(); partitionKeyRangeIdInternal(options, partitionKeyRangeId); int queryResultCount = createdCollection.queryItems(query, options) .flatMap(p -> Flux.fromIterable(p.results())) .collectList().block().size(); sum += queryResultCount; } assertThat(sum).isEqualTo(createdDocuments.size()); } @Test(groups = { "simple" }, timeOut = TIMEOUT) public void compositeContinuationTokenRoundTrip() throws Exception { { // Positive CompositeContinuationToken compositeContinuationToken = new CompositeContinuationToken("asdf", new Range<String>("A", "D", false, true)); String serialized = compositeContinuationToken.toString(); ValueHolder<CompositeContinuationToken> outCompositeContinuationToken = new ValueHolder<CompositeContinuationToken>(); boolean succeeed = CompositeContinuationToken.tryParse(serialized, outCompositeContinuationToken); assertThat(succeeed).isTrue(); CompositeContinuationToken deserialized = outCompositeContinuationToken.v; String token = deserialized.getToken(); Range<String> range = deserialized.getRange(); assertThat(token).isEqualTo("asdf"); assertThat(range.getMin()).isEqualTo("A"); 
assertThat(range.getMax()).isEqualTo("D"); assertThat(range.isMinInclusive()).isEqualTo(false); assertThat(range.isMaxInclusive()).isEqualTo(true); } { // Negative ValueHolder<CompositeContinuationToken> outCompositeContinuationToken = new ValueHolder<CompositeContinuationToken>(); boolean succeeed = CompositeContinuationToken.tryParse("{\"property\" : \"not a valid composite continuation token\"}", outCompositeContinuationToken); assertThat(succeeed).isFalse(); } { // Negative - GATEWAY composite continuation token ValueHolder<CompositeContinuationToken> outCompositeContinuationToken = new ValueHolder<CompositeContinuationToken>(); boolean succeeed = CompositeContinuationToken.tryParse("{\"token\":\"-RID:tZFQAImzNLQLAAAAAAAAAA==#RT:1#TRC:10\",\"range\":{\"min\":\"\",\"max\":\"FF\"}}", outCompositeContinuationToken); assertThat(succeeed).isFalse(); } } // TODO: This test has been timing out on build, related work item - https://msdata.visualstudio.com/CosmosDB/_workitems/edit/402438/ @Test(groups = { "non-emulator" }, timeOut = TIMEOUT * 10) public void queryDocumentsWithCompositeContinuationTokens() throws Exception { String query = "SELECT * FROM c"; // Get Expected List<CosmosItemProperties> expectedDocs = new ArrayList<>(createdDocuments); assertThat(expectedDocs).isNotEmpty(); this.queryWithContinuationTokensAndPageSizes(query, new int[] {1, 10, 100}, expectedDocs); } @BeforeClass(groups = { "simple", "non-emulator" }, timeOut = 2 * SETUP_TIMEOUT) public void beforeClass() { client = clientBuilder().build(); createdDatabase = getSharedCosmosDatabase(client); createdCollection = getSharedMultiPartitionCosmosContainer(client); truncateCollection(createdCollection); List<CosmosItemProperties> docDefList = new ArrayList<>(); for(int i = 0; i < 13; i++) { docDefList.add(getDocumentDefinition(i)); } for(int i = 0; i < 21; i++) { docDefList.add(getDocumentDefinition(99)); } createdDocuments = bulkInsertBlocking(createdCollection, docDefList); 
waitIfNeededForReplicasToCatchUp(clientBuilder()); } @AfterClass(groups = { "simple", "non-emulator" }, timeOut = SHUTDOWN_TIMEOUT, alwaysRun = true) public void afterClass() { safeClose(client); } private static CosmosItemProperties getDocumentDefinition(int cnt) { String uuid = UUID.randomUUID().toString(); CosmosItemProperties doc = new CosmosItemProperties(String.format("{ " + "\"id\": \"%s\", " + "\"prop\" : %d, " + "\"mypk\": \"%s\", " + "\"sgmts\": [[6519456, 1471916863], [2498434, 1455671440]]" + "}" , uuid, cnt, uuid)); return doc; } @Test(groups = { "simple" }, timeOut = TIMEOUT, enabled = false) public void invalidQuerySytax() throws Exception { String query = "I am an invalid query"; FeedOptions options = new FeedOptions(); options.enableCrossPartitionQuery(true); Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query, options); FailureValidator validator = new FailureValidator.Builder().instanceOf(CosmosClientException.class) .statusCode(400).notNullActivityId().build(); validateQueryFailure(queryObservable, validator); } public CosmosItemProperties createDocument(CosmosContainer cosmosContainer, int cnt) throws CosmosClientException { CosmosItemProperties docDefinition = getDocumentDefinition(cnt); return cosmosContainer.createItem(docDefinition).block().properties(); } private void queryWithContinuationTokensAndPageSizes(String query, int[] pageSizes, List<CosmosItemProperties> expectedDocs) { for (int pageSize : pageSizes) { List<CosmosItemProperties> receivedDocuments = this.queryWithContinuationTokens(query, pageSize); List<String> actualIds = new ArrayList<String>(); for (CosmosItemProperties document : receivedDocuments) { actualIds.add(document.resourceId()); } List<String> expectedIds = new ArrayList<String>(); for (CosmosItemProperties document : expectedDocs) { expectedIds.add(document.resourceId()); } assertThat(actualIds).containsOnlyElementsOf(expectedIds); } } private List<CosmosItemProperties> 
queryWithContinuationTokens(String query, int pageSize) { String requestContinuation = null; List<String> continuationTokens = new ArrayList<String>(); List<CosmosItemProperties> receivedDocuments = new ArrayList<CosmosItemProperties>(); do { FeedOptions options = new FeedOptions(); options.maxItemCount(pageSize); options.enableCrossPartitionQuery(true); options.maxDegreeOfParallelism(2); options.requestContinuation(requestContinuation); Flux<FeedResponse<CosmosItemProperties>> queryObservable = createdCollection.queryItems(query, options); TestSubscriber<FeedResponse<CosmosItemProperties>> testSubscriber = new TestSubscriber<>(); queryObservable.subscribe(testSubscriber); testSubscriber.awaitTerminalEvent(TIMEOUT, TimeUnit.MILLISECONDS); testSubscriber.assertNoErrors(); testSubscriber.assertComplete(); FeedResponse<CosmosItemProperties> firstPage = (FeedResponse<CosmosItemProperties>) testSubscriber.getEvents().get(0).get(0); requestContinuation = firstPage.continuationToken(); receivedDocuments.addAll(firstPage.results()); continuationTokens.add(requestContinuation); } while (requestContinuation != null); return receivedDocuments; } }
package de.fraunhofer.iosb.testrunner; import java.io.File; import java.lang.ref.SoftReference; import java.net.MalformedURLException; import java.net.URL; import java.net.URLClassLoader; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import de.fraunhofer.iosb.messaginghelpers.LogConfigurationHelper; import de.fraunhofer.iosb.tc_lib.AbstractTestCase; import de.fraunhofer.iosb.tc_lib.IVCTVersionCheck; import de.fraunhofer.iosb.tc_lib.IVCTVersionCheckException; import de.fraunhofer.iosb.tc_lib.IVCT_Verdict; import nato.ivct.commander.CmdHeartbeatSend; import nato.ivct.commander.CmdHeartbeatSend.OnCmdHeartbeatSend; import nato.ivct.commander.CmdListTestSuites; import nato.ivct.commander.CmdListTestSuites.TestSuiteDescription; import nato.ivct.commander.CmdOperatorConfirmationListener; import nato.ivct.commander.CmdOperatorConfirmationListener.OnOperatorConfirmationListener; import nato.ivct.commander.CmdOperatorConfirmationListener.OperatorConfirmationInfo; import nato.ivct.commander.CmdQuitListener; import nato.ivct.commander.CmdQuitListener.OnQuitListener; import nato.ivct.commander.CmdSendTcVerdict; import nato.ivct.commander.CmdSetLogLevel.LogLevel; import nato.ivct.commander.CmdSetLogLevelListener; import nato.ivct.commander.CmdSetLogLevelListener.OnSetLogLevelListener; import nato.ivct.commander.CmdStartTcListener; import nato.ivct.commander.CmdStartTcListener.OnStartTestCaseListener; import nato.ivct.commander.CmdStartTcListener.TcInfo; import nato.ivct.commander.CmdAbortTcListener; import nato.ivct.commander.CmdAbortTcListener.OnAbortTestCaseListener; import nato.ivct.commander.CmdAbortTcListener.TcAbortInfo; import nato.ivct.commander.Factory; import nato.ivct.commander.TcLoggerData; /** * Testrunner that listens for certain commands to start and stop test cases. 
* * @author Manfred Schenk (Fraunhofer IOSB) * @author Reinhard Herzog (Fraunhofer IOSB) */ public class TestEngine extends TestRunner implements OnSetLogLevelListener, OnQuitListener, OnStartTestCaseListener, OnAbortTestCaseListener, OnCmdHeartbeatSend, OnOperatorConfirmationListener { private AbstractTestCase testCase = null; private CmdListTestSuites testSuites; private Map<String, URLClassLoader> classLoaders = new HashMap<>(); // for enhanced heartbeat with RTI-Type-Information brf 22.10.2020 private String testEngineLabel; // the number of threads in the fixed thread pool private static final int MAX_THREADS = 10; ExecutorService executorService = Executors.newFixedThreadPool(MAX_THREADS); private Map<String,SoftReference<Future<?>>> threadCache = new HashMap<>(); /** * Main entry point from the command line. * * @param args The command line arguments */ public static void main(final String[] args) { startTestEngine(); } private static void startTestEngine() { new TestEngine(); } /** * public constructor. 
*/ public TestEngine() { // set heartbeat identifier myClassName = this.getClass().getSimpleName(); // initialize the IVCT Commander Factory Factory.initialize(); // for enhanced heartbeat with RTI-Type-Information brf 22.10.2020 testEngineLabel = Factory.props.getProperty("TESTENGINE_LABEL") ; // Configure the logger LogConfigurationHelper.configureLogging(); // start command listeners new CmdSetLogLevelListener(this).execute(); new CmdStartTcListener(this).execute(); new CmdQuitListener(this).execute(); new CmdAbortTcListener(this).execute(); try { (new CmdHeartbeatSend(this)).execute(); } catch (Exception e1) { Set<Logger> loggers = TcLoggerData.getLoggers(); for (Logger entry : loggers) { entry.error("Could not start HeartbeatSend: ",e1); } if (loggers.size() == 0) { System.out.println("Could not start HeartbeatSend: " + e1.toString()); } } (new CmdOperatorConfirmationListener(this)).execute(); // get the test suite descriptions testSuites = new CmdListTestSuites(); try { testSuites.execute(); } catch (Exception e) { e.printStackTrace(); } } /** * * @author hzg * * TestScheduleRunner executes a sequence of test cases * */ private class TestScheduleRunner implements Runnable { TcInfo info; TestScheduleRunner(final TcInfo info) { this.info = info; } private File getCwd() { return new File("").getAbsoluteFile(); } /** * This method provides a way to set the current working directory which is not * available as such in java. * * N.B. 
This method uses a trick to get the desired result * * @param directoryName name of directory to be the current directory * @return true if successful */ private boolean setCurrentDirectory(String directoryName) { boolean result = false; // Boolean indicating whether directory was // set File directory; // Desired current working directory directory = new File(directoryName).getAbsoluteFile(); if (directory.exists()) { directory.mkdirs(); result = (System.setProperty("user.dir", directory.getAbsolutePath()) != null); } return result; } private void extendThreadClassLoader(final TestSuiteDescription testSuiteDescription) { URLClassLoader classLoader = classLoaders.get(testSuiteDescription.id); if (classLoader == null) { String tsPath = Factory.props.getProperty(Factory.IVCT_TS_DIST_HOME_ID); String libPath = tsPath + "/" + testSuiteDescription.tsLibTimeFolder; File dir = new File(libPath); File[] filesList = dir.listFiles(); if (filesList == null) { Set<Logger> loggers = TcLoggerData.getLoggers(); for (Logger entry : loggers) { entry.info("No files found in folder {}", dir.getPath()); } if (loggers.size() == 0) { System.out.println("No files found in folder {}" + dir.getPath()); } return; } URL[] urls = new URL[filesList.length]; for (int i = 0; i < filesList.length; i++) { try { urls[i] = filesList[i].toURI().toURL(); } catch (MalformedURLException e) { e.printStackTrace(); } } classLoader = new URLClassLoader(urls, TestRunner.class.getClassLoader()); classLoaders.put(testSuiteDescription.id, classLoader); } Thread.currentThread().setContextClassLoader(classLoader); } public void run() { Logger tcLogger = LoggerFactory.getLogger(info.testCaseId); TcLoggerData.addLoggerData(tcLogger, tcLogger.getName(), info.sutName, info.testSuiteId, info.testCaseId); tcLogger.info("TestEngine:onMessageConsumer:run: {}", info.testCaseId); TestSuiteDescription tsd = testSuites.getTestSuiteForTc(info.testCaseId); if (tsd == null) { tcLogger.error("TestEngine:onMessageConsumer:run: 
unknown testsuite for testcase: {}", info.testCaseId); return; } String runFolder = Factory.props.getProperty(Factory.IVCT_TS_DIST_HOME_ID) + '/' + tsd.tsRunTimeFolder; tcLogger.info("TestEngine:onMessageConsumer:run: tsRunFolder is {}", runFolder); if (setCurrentDirectory(runFolder)) { tcLogger.info("TestEngine:onMessageConsumer:run: setCurrentDirectory true"); } File f = getCwd(); String tcDir = f.getAbsolutePath(); tcLogger.info("TestEngine:onMessageConsumer:run: TC DIR is {}", tcDir); tcLogger.info("TestEngine:onMessageConsumer:run: The test case class is: {}", info.testCaseId); String[] testcases = info.testCaseId.split("\\s"); IVCT_Verdict verdicts[] = new IVCT_Verdict[testcases.length]; extendThreadClassLoader(tsd); int i = 0; for (final String classname : testcases) { testCase = null; try { testCase = (AbstractTestCase) Thread.currentThread().getContextClassLoader().loadClass(classname) .newInstance(); } catch (InstantiationException | IllegalAccessException | ClassNotFoundException ex) { tcLogger.error("Could not instantiate " + classname + " !", ex); continue; } if (testCase == null) { verdicts[i] = new IVCT_Verdict(); verdicts[i].verdict = IVCT_Verdict.Verdict.INCONCLUSIVE; verdicts[i].text = "Could not instantiate " + classname; i++; continue; } testCase.setDefaultLogger(tcLogger); testCase.setSutName(info.sutName); testCase.setTsName(info.testSuiteId); testCase.setTcName(classname); testCase.setSettingsDesignator(info.settingsDesignator); testCase.setFederationName(info.federationName); testCase.setSutFederateName(info.sutFederateName); /* * Check the compability of IVCT-Version which had this testCase at * building-time against the IVCT-Version at Runtime */ try { tcLogger.debug("TestEngine.run.compabilityCheck: the IVCTVersion of testcase {} is: {}", testCase, testCase.getIVCTVersion()); new IVCTVersionCheck(testCase.getIVCTVersion()).compare(); } catch (IVCTVersionCheckException cf) { tcLogger.error("TestEngine: IVCTVersionCheck shows problems with 
IVCTVersion-Check ", cf); verdicts[i] = new IVCT_Verdict(); verdicts[i].verdict = IVCT_Verdict.Verdict.INCONCLUSIVE; verdicts[i].text = "Could not instantiate because of IVCTVersionCheckError " + classname; i++; continue; } verdicts[i] = testCase.execute(info.testCaseParam, tcLogger); tcLogger.info("Test Case Ended"); new CmdSendTcVerdict(info.sutName, info.sutDir, info.testSuiteId, testcases[i], verdicts[i].verdict.name(), verdicts[i].text).execute(); i++; } TcLoggerData.removeLogger(tcLogger.getName()); } } @Override public void onSetLogLevel(LogLevel level) { TcLoggerData.setLogLevel(level.name()); } @Override public void onQuit() { executorService.shutdown(); System.exit(0); } @Override public void onStartTestCase(TcInfo info) { Logger tcLogger = LoggerFactory.getLogger(info.testCaseId); // for enhanced RTI-Type-Information brf 06.11.2020 tcLogger.info("TestEngine.onStartTestCase get TCInfo.testEngineLabel \"" + info.testEngineLabel +"\"" ); tcLogger.info("TestEngine.onStartTestCase get TCInfo.settingsDesignator \""+ info.settingsDesignator +"\""); if ( ! 
verifyTestEngineLabel("onStartTestCase", tcLogger, info.testEngineLabel ) ) { return; } // if the TestEngineLabel is like makRti4.6* but the settingsDesignator is not for MAK RtI, stop here if ( info.testEngineLabel.toLowerCase().contains("mak".toLowerCase()) && !info.settingsDesignator.toLowerCase().contains("setqb".toLowerCase()) ) { tcLogger.warn("TestEngine is startet for MAK RTI but possibly got wrong settingsDesignator "); //return; } Runnable th1 = new TestScheduleRunner(info); Future<?> startedThread = executorService.submit(th1); threadCache.put(info.testCaseId, new SoftReference<>(startedThread)); tcLogger.info("Test Case Started: {}", info.testCaseId); } @Override public void onAbortTestCase(TcAbortInfo info) { Logger tcLogger = LoggerFactory.getLogger(info.testCaseId); // for RTI-Type-Information brf 07.12.2020 tcLogger.info("TestEngine.onAbortTestCase get TcAbortInfo.testEngineLabel \"" + info.testEngineLabel +"\"" ); if ( ! verifyTestEngineLabel("onAbortTestCase", tcLogger, info.testEngineLabel ) ) { return; } tcLogger.warn("Aborting the test case: {}", info.testCaseId); Future<?> threadToAbort = threadCache.get(info.testCaseId).get(); if (threadToAbort != null && !threadToAbort.isDone() && !threadToAbort.isCancelled()) { threadToAbort.cancel(true); tcLogger.warn("Test Case Aborted: {}", info.testCaseId); } else { tcLogger.warn("Test case could not be aborted: {} {}", info.testCaseId, threadToAbort); if (threadToAbort != null) tcLogger.warn("Thread isDone: {}, Thread is already canceled: {}", threadToAbort.isDone(), threadToAbort.isCancelled()); } } @Override public void onOperatorConfirmation(OperatorConfirmationInfo operatorConfirmationInfo) { // for enhanced RTI-Type-Information brf 07.12.2020 Logger tcLogger = LoggerFactory.getLogger(operatorConfirmationInfo.testCaseId); if (operatorConfirmationInfo.testEngineLabel != null) { tcLogger.info("Testengine.onOperatorConfirmation get OperatorConfirmationInfo.testEngineLabel: " + 
operatorConfirmationInfo.testEngineLabel); if (!verifyTestEngineLabel("onOperatorConfirmation", tcLogger, operatorConfirmationInfo.testEngineLabel)) { return; } } testCase.onOperatorConfirmation(operatorConfirmationInfo); } //for enhanced RTI-Type-Information brf 07.12.2020 private boolean verifyTestEngineLabel(String requestingMethod, Logger tcLogger, String testEngineLabel_) { boolean competence = true; if (!(testEngineLabel_.equals(testEngineLabel) || testEngineLabel_.equals(Factory.TESTENGINE_LABEL_DEFLT))) { tcLogger.info("TestEngine." + requestingMethod + ": This Job is not for this TestEngine - do not perfom\" "); competence = false; } return competence; } /* * implement a heartbeat , brf 05.07.2019 (Fraunhofer IOSB) CmdHeartbeatSend * will fetch all 5 Seconds the health state from 'here' and send all 5 Seconds * a message to ActiveMQ So if the value for health is changed here, this will * change the tenor of the message CmdHeartbeatSend sends to ActiveMQ if this * thread is stopped, CmdHeardbeatListen will give out an Alert-Status */ @Override public String getMyClassName() { return myClassName; } @Override public boolean getMyHealth() { return health; } // for enhanced heartbeat with RTI-Type-Information brf 22.10.2020 public String getMyTestEngineLabel() { return testEngineLabel; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.analytics.function.mapping;

import java.time.Instant;
import java.time.format.DateTimeParseException;
import java.util.Date;

import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.analytics.value.AnalyticsValueStream;
import org.apache.solr.analytics.value.DateValue;
import org.apache.solr.analytics.value.DoubleValue;
import org.apache.solr.analytics.value.FillableTestValue.TestDateValue;
import org.apache.solr.analytics.value.FillableTestValue.TestDateValueStream;
import org.apache.solr.analytics.value.FillableTestValue.TestDoubleValue;
import org.apache.solr.analytics.value.FillableTestValue.TestDoubleValueStream;
import org.apache.solr.analytics.value.FillableTestValue.TestFloatValue;
import org.apache.solr.analytics.value.FillableTestValue.TestFloatValueStream;
import org.apache.solr.analytics.value.FillableTestValue.TestIntValue;
import org.apache.solr.analytics.value.FillableTestValue.TestIntValueStream;
import org.apache.solr.analytics.value.FillableTestValue.TestLongValue;
import org.apache.solr.analytics.value.FillableTestValue.TestLongValueStream;
import org.apache.solr.analytics.value.FillableTestValue.TestStringValue;
import org.apache.solr.analytics.value.FillableTestValue.TestStringValueStream;
import org.apache.solr.analytics.value.FloatValue;
import org.apache.solr.analytics.value.IntValue;
import org.apache.solr.analytics.value.LongValue;
import org.apache.solr.analytics.value.StringValue;
import org.junit.Test;

/**
 * Tests for {@link BottomFunction}, which reduces either a single multi-valued
 * stream or several single-valued inputs to the minimum ("bottom") value.
 *
 * <p>Each typed test covers three cases: no values present (the function must
 * report {@code exists() == false}), exactly one value, and multiple values.
 * For the multi-input variants, inputs whose {@code exists} flag is false must
 * be ignored by the reduction.
 */
public class BottomFunctionTest extends SolrTestCaseJ4 {

  @Test
  public void multiValueIntTest() {
    TestIntValueStream val = new TestIntValueStream();
    AnalyticsValueStream uncasted = BottomFunction.creatorFunction.apply(new AnalyticsValueStream[] {val});
    assertTrue(uncasted instanceof IntValue);
    IntValue func = (IntValue) uncasted;

    // No values
    val.setValues();
    func.getInt(); // value is undefined here; only the exists flag matters
    assertFalse(func.exists());

    // One value
    val.setValues(30);
    assertEquals(30, func.getInt());
    assertTrue(func.exists());

    // Multiple values
    val.setValues(30, 20, -10, 59);
    assertEquals(-10, func.getInt());
    assertTrue(func.exists());
  }

  @Test
  public void multiValueLongTest() {
    TestLongValueStream val = new TestLongValueStream();
    AnalyticsValueStream uncasted = BottomFunction.creatorFunction.apply(new AnalyticsValueStream[] {val});
    assertTrue(uncasted instanceof LongValue);
    LongValue func = (LongValue) uncasted;

    // No values
    val.setValues();
    func.getLong();
    assertFalse(func.exists());

    // One value
    val.setValues(30L);
    assertEquals(30L, func.getLong());
    assertTrue(func.exists());

    // Multiple values
    val.setValues(30L, 20L, -10L, 59L);
    assertEquals(-10L, func.getLong());
    assertTrue(func.exists());
  }

  @Test
  public void multiValueFloatTest() {
    TestFloatValueStream val = new TestFloatValueStream();
    AnalyticsValueStream uncasted = BottomFunction.creatorFunction.apply(new AnalyticsValueStream[] {val});
    assertTrue(uncasted instanceof FloatValue);
    FloatValue func = (FloatValue) uncasted;

    // No values
    val.setValues();
    func.getFloat();
    assertFalse(func.exists());

    // One value
    val.setValues(30.0F);
    assertEquals(30.0F, func.getFloat(), .000001);
    assertTrue(func.exists());

    // Multiple values (two near-equal negatives exercise the comparison)
    val.setValues(30.5F, 20.01F, -10.49F, -10.48F);
    assertEquals(-10.49F, func.getFloat(), .000001);
    assertTrue(func.exists());
  }

  @Test
  public void multiValueDoubleTest() {
    TestDoubleValueStream val = new TestDoubleValueStream();
    AnalyticsValueStream uncasted = BottomFunction.creatorFunction.apply(new AnalyticsValueStream[] {val});
    assertTrue(uncasted instanceof DoubleValue);
    DoubleValue func = (DoubleValue) uncasted;

    // No values
    val.setValues();
    func.getDouble();
    assertFalse(func.exists());

    // One value
    val.setValues(30.0);
    assertEquals(30.0, func.getDouble(), .000001);
    assertTrue(func.exists());

    // Multiple values
    val.setValues(30.5, 20.01, -10.49, -10.48);
    assertEquals(-10.49, func.getDouble(), .000001);
    assertTrue(func.exists());
  }

  @Test
  public void multiValueDateTest() throws DateTimeParseException {
    TestDateValueStream val = new TestDateValueStream();
    AnalyticsValueStream uncasted = BottomFunction.creatorFunction.apply(new AnalyticsValueStream[] {val});
    assertTrue(uncasted instanceof DateValue);
    DateValue func = (DateValue) uncasted;

    // No values
    val.setValues();
    func.getDate();
    assertFalse(func.exists());

    // One value
    val.setValues("1950-05-03T10:30:50Z");
    assertEquals(Date.from(Instant.parse("1950-05-03T10:30:50Z")), func.getDate());
    assertTrue(func.exists());

    // Multiple values
    // NOTE: the last literal was previously "1930-05-020T10:45:50Z", which is
    // not a valid ISO-8601 instant (day-of-month "020") and fails to parse.
    val.setValues(
        "1950-05-03T10:30:50Z",
        "2200-01-01T10:00:50Z",
        "1800-12-31T11:30:50Z",
        "1930-05-20T10:45:50Z");
    assertEquals(Date.from(Instant.parse("1800-12-31T11:30:50Z")), func.getDate());
    assertTrue(func.exists());
  }

  @Test
  public void multiValueStringTest() {
    TestStringValueStream val = new TestStringValueStream();
    AnalyticsValueStream uncasted = BottomFunction.creatorFunction.apply(new AnalyticsValueStream[] {val});
    assertTrue(uncasted instanceof StringValue);
    StringValue func = (StringValue) uncasted;

    // No values
    val.setValues();
    func.getString();
    assertFalse(func.exists());

    // One value
    val.setValues("abc");
    assertEquals("abc", func.getString());
    assertTrue(func.exists());

    // Multiple values (lexicographic minimum expected)
    val.setValues("1abcdef", "abc", "def", "1abc");
    assertEquals("1abc", func.getString());
    assertTrue(func.exists());
  }

  @Test
  public void multipleSingleValueIntTest() {
    TestIntValue val1 = new TestIntValue();
    TestIntValue val2 = new TestIntValue();
    TestIntValue val3 = new TestIntValue();
    TestIntValue val4 = new TestIntValue();
    AnalyticsValueStream uncasted =
        BottomFunction.creatorFunction.apply(new AnalyticsValueStream[] {val1, val2, val3, val4});
    assertTrue(uncasted instanceof IntValue);
    IntValue func = (IntValue) uncasted;

    // None exist
    val1.setExists(false);
    val2.setExists(false);
    val3.setExists(false);
    val4.setExists(false);
    func.getInt();
    assertFalse(func.exists());

    // Some exist: non-existing values (even smaller ones) must be ignored
    val1.setValue(1000).setExists(false);
    val2.setValue(30).setExists(true);
    val3.setValue(-1000).setExists(false);
    val4.setValue(12).setExists(true);
    assertEquals(12, func.getInt());
    assertTrue(func.exists());

    // All exist
    val1.setValue(45).setExists(true);
    val2.setValue(30).setExists(true);
    val3.setValue(-2).setExists(true);
    val4.setValue(12).setExists(true);
    assertEquals(-2, func.getInt());
    assertTrue(func.exists());
  }

  @Test
  public void multipleSingleValueLongTest() {
    TestLongValue val1 = new TestLongValue();
    TestLongValue val2 = new TestLongValue();
    TestLongValue val3 = new TestLongValue();
    TestLongValue val4 = new TestLongValue();
    AnalyticsValueStream uncasted =
        BottomFunction.creatorFunction.apply(new AnalyticsValueStream[] {val1, val2, val3, val4});
    assertTrue(uncasted instanceof LongValue);
    LongValue func = (LongValue) uncasted;

    // None exist
    val1.setExists(false);
    val2.setExists(false);
    val3.setExists(false);
    val4.setExists(false);
    func.getLong();
    assertFalse(func.exists());

    // Some exist
    val1.setValue(1000L).setExists(false);
    val2.setValue(30L).setExists(true);
    val3.setValue(-1000L).setExists(false);
    val4.setValue(12L).setExists(true);
    assertEquals(12L, func.getLong());
    assertTrue(func.exists());

    // All exist
    val1.setValue(45L).setExists(true);
    val2.setValue(30L).setExists(true);
    val3.setValue(-2L).setExists(true);
    val4.setValue(12L).setExists(true);
    assertEquals(-2L, func.getLong());
    assertTrue(func.exists());
  }

  @Test
  public void multipleSingleValueFloatTest() {
    TestFloatValue val1 = new TestFloatValue();
    TestFloatValue val2 = new TestFloatValue();
    TestFloatValue val3 = new TestFloatValue();
    TestFloatValue val4 = new TestFloatValue();
    AnalyticsValueStream uncasted =
        BottomFunction.creatorFunction.apply(new AnalyticsValueStream[] {val1, val2, val3, val4});
    assertTrue(uncasted instanceof FloatValue);
    FloatValue func = (FloatValue) uncasted;

    // None exist
    val1.setExists(false);
    val2.setExists(false);
    val3.setExists(false);
    val4.setExists(false);
    func.getFloat();
    assertFalse(func.exists());

    // Some exist
    val1.setValue(1000.1233F).setExists(false);
    val2.setValue(30.34F).setExists(true);
    val3.setValue(-1000.3241F).setExists(false);
    val4.setValue(12.123F).setExists(true);
    assertEquals(12.123F, func.getFloat(), .000001);
    assertTrue(func.exists());

    // All exist
    val1.setValue(45.43F).setExists(true);
    val2.setValue(30.231F).setExists(true);
    val3.setValue(-2.33F).setExists(true);
    val4.setValue(12.5F).setExists(true);
    assertEquals(-2.33F, func.getFloat(), .000001);
    assertTrue(func.exists());
  }

  @Test
  public void multipleSingleValueDoubleTest() {
    TestDoubleValue val1 = new TestDoubleValue();
    TestDoubleValue val2 = new TestDoubleValue();
    TestDoubleValue val3 = new TestDoubleValue();
    TestDoubleValue val4 = new TestDoubleValue();
    AnalyticsValueStream uncasted =
        BottomFunction.creatorFunction.apply(new AnalyticsValueStream[] {val1, val2, val3, val4});
    assertTrue(uncasted instanceof DoubleValue);
    DoubleValue func = (DoubleValue) uncasted;

    // None exist
    val1.setExists(false);
    val2.setExists(false);
    val3.setExists(false);
    val4.setExists(false);
    func.getDouble();
    assertFalse(func.exists());

    // Some exist
    val1.setValue(1000.1233).setExists(false);
    val2.setValue(30.34).setExists(true);
    val3.setValue(-1000.3241).setExists(false);
    val4.setValue(12.123).setExists(true);
    assertEquals(12.123, func.getDouble(), .000001);
    assertTrue(func.exists());

    // All exist
    val1.setValue(45.43).setExists(true);
    val2.setValue(30.231).setExists(true);
    val3.setValue(-2.33).setExists(true);
    val4.setValue(12.5).setExists(true);
    assertEquals(-2.33, func.getDouble(), .000001);
    assertTrue(func.exists());
  }

  @Test
  public void multipleSingleValueDateTest() throws DateTimeParseException {
    TestDateValue val1 = new TestDateValue();
    TestDateValue val2 = new TestDateValue();
    TestDateValue val3 = new TestDateValue();
    TestDateValue val4 = new TestDateValue();
    AnalyticsValueStream uncasted =
        BottomFunction.creatorFunction.apply(new AnalyticsValueStream[] {val1, val2, val3, val4});
    assertTrue(uncasted instanceof DateValue);
    DateValue func = (DateValue) uncasted;

    // None exist
    val1.setExists(false);
    val2.setExists(false);
    val3.setExists(false);
    val4.setExists(false);
    func.getDate();
    assertFalse(func.exists());

    // Some exist
    val1.setValue("9999-05-03T10:30:50Z").setExists(false);
    val2.setValue("1950-05-03T10:30:50Z").setExists(true);
    val3.setValue("0000-05-03T10:30:50Z").setExists(false);
    val4.setValue("1850-05-03T10:30:50Z").setExists(true);
    assertEquals(Date.from(Instant.parse("1850-05-03T10:30:50Z")), func.getDate());
    assertTrue(func.exists());

    // All exist
    val1.setValue("2200-05-03T10:30:50Z").setExists(true);
    val2.setValue("1950-05-03T10:30:50Z").setExists(true);
    val3.setValue("1700-05-03T10:30:50Z").setExists(true);
    val4.setValue("1850-05-03T10:30:50Z").setExists(true);
    assertEquals(Date.from(Instant.parse("1700-05-03T10:30:50Z")), func.getDate());
    assertTrue(func.exists());
  }

  // Renamed from multipleStringValueDateTest: this test exercises String
  // inputs, not dates, so the name now matches its typed siblings.
  @Test
  public void multipleSingleValueStringTest() {
    TestStringValue val1 = new TestStringValue();
    TestStringValue val2 = new TestStringValue();
    TestStringValue val3 = new TestStringValue();
    TestStringValue val4 = new TestStringValue();
    AnalyticsValueStream uncasted =
        BottomFunction.creatorFunction.apply(new AnalyticsValueStream[] {val1, val2, val3, val4});
    assertTrue(uncasted instanceof StringValue);
    StringValue func = (StringValue) uncasted;

    // None exist
    val1.setExists(false);
    val2.setExists(false);
    val3.setExists(false);
    val4.setExists(false);
    func.getString();
    assertFalse(func.exists());

    // Some exist
    val1.setValue("abc").setExists(true);
    val2.setValue("1111").setExists(false);
    val3.setValue("asdfads").setExists(true);
    val4.setValue("zzzzzzzz").setExists(false);
    assertEquals("abc", func.getString());
    assertTrue(func.exists());

    // All exist
    val1.setValue("abc").setExists(true);
    val2.setValue("abc1234").setExists(true);
    val3.setValue("asdfads").setExists(true);
    val4.setValue("fdgsfg").setExists(true);
    assertEquals("abc", func.getString());
    assertTrue(func.exists());
  }
}
package coursier.paths;

import java.io.IOException;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.LinkedHashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class Util {

    // No real reason to put that in the path module, except from the fact that that makes this accessible
    // from both the bootstrap-launcher and cli modules.

    // Matches "${name}" where name contains none of the bracket characters.
    private static final Pattern propertyRegex =
            Pattern.compile(Pattern.quote("${") + "[^" + Pattern.quote("{[()]}") + "]*" + Pattern.quote("}"));

    /** Expands {@code ${...}} references in the given properties against {@link System#getProperties()}. */
    public static Map<String, String> expandProperties(Map<String, String> properties) {
        return expandProperties(System.getProperties(), properties);
    }

    /**
     * Expands {@code ${name}} references in property values.
     *
     * <p>A key ending in {@code "?"} is optional: it is only kept (under the key without the
     * trailing {@code "?"}) when {@code systemProperties} does not already define it.
     * References are resolved against the static (reference-free) properties first, then
     * against {@code systemProperties}; unresolvable references expand to the empty string.
     *
     * @param systemProperties fallback source for {@code ${name}} lookups and the
     *                         "already defined" check for optional keys
     * @param properties       raw properties to expand
     * @return a new map with all references expanded, in insertion order
     */
    public static Map<String, String> expandProperties(
            Properties systemProperties,
            Map<String, String> properties) {

        final Map<String, String> resolved = new LinkedHashMap<>(properties.size());
        final Map<String, String> withProps = new LinkedHashMap<>(properties.size());

        // First pass: split into values that contain ${...} references and those that don't.
        for (Map.Entry<String, String> entry : properties.entrySet()) {
            String k = entry.getKey();
            String value = entry.getValue();
            String actualKey = k;
            boolean process = true;
            if (k.endsWith("?")) {
                actualKey = k.substring(0, k.length() - 1);
                process = !systemProperties.containsKey(actualKey);
            }
            if (process) {
                if (propertyRegex.matcher(value).find())
                    withProps.put(actualKey, value);
                else
                    resolved.put(actualKey, value);
            }
        }

        // we don't go recursive here - dynamic properties can only reference static ones
        for (Map.Entry<String, String> entry : withProps.entrySet()) {
            String value = entry.getValue();
            Matcher matcher = propertyRegex.matcher(value);
            // Rebuild the string left-to-right instead of splicing in place: the previous
            // implementation reused match offsets after mutating `value`, which corrupted
            // the result when a value held several ${...} references and a replacement
            // had a different length than the reference it replaced.
            StringBuilder expanded = new StringBuilder(value.length());
            int copiedUpTo = 0;
            while (matcher.find()) {
                // strip the leading "${" and trailing "}"
                String subKey = value.substring(matcher.start() + 2, matcher.end() - 1);
                String subValue = resolved.get(subKey);
                if (subValue == null)
                    subValue = systemProperties.getProperty(subKey);
                if (subValue == null)
                    subValue = ""; // throw instead?
                expanded.append(value, copiedUpTo, matcher.start()).append(subValue);
                copiedUpTo = matcher.end();
            }
            expanded.append(value, copiedUpTo, value.length());
            resolved.put(entry.getKey(), expanded.toString());
        }

        return resolved;
    }

    /** Like {@link Files#createDirectories}, tolerating a pre-existing directory reached via a symlink. */
    public static void createDirectories(Path path) throws IOException {
        try {
            Files.createDirectories(path);
        } catch (FileAlreadyExistsException ex) {
            // see https://bugs.openjdk.java.net/browse/JDK-8130464
            // Files.createDirectories does that check too, but with LinkOptions.NOFOLLOW_LINKS
            if (!Files.isDirectory(path))
                throw ex;
        }
    }

    private static volatile Boolean useColorOutput0 = null;

    private static boolean computeUseColorOutput() {
        if (System.getenv("INSIDE_EMACS") != null)
            return false;

        boolean disableViaEnv;
        String envProgress = System.getenv("COURSIER_PROGRESS");
        if (envProgress != null && (envProgress.equalsIgnoreCase("true") || envProgress.equalsIgnoreCase("enable") || envProgress.equalsIgnoreCase("1"))) {
            disableViaEnv = false;
        } else if (envProgress != null && (envProgress.equalsIgnoreCase("false") || envProgress.equalsIgnoreCase("disable") || envProgress.equalsIgnoreCase("0"))) {
            disableViaEnv = true;
        } else {
            disableViaEnv = System.getenv("COURSIER_NO_TERM") != null;
        }

        if (disableViaEnv)
            return false;

        return true;
    }

    // a bit more loose than useAnsiOutput (doesn't look at System.console() == null or System.getenv("CI"))
    public static boolean useColorOutput() {
        if (useColorOutput0 == null) {
            useColorOutput0 = computeUseColorOutput();
        }
        return useColorOutput0;
    }

    private static volatile Boolean useAnsiOutput0 = null;

    private static boolean computeUseAnsiOutput() {
        if (System.console() == null)
            return false;
        if (System.getenv("INSIDE_EMACS") != null)
            return false;
        if (System.getenv("CI") != null)
            return false;

        boolean disableViaEnv;
        String envProgress = System.getenv("COURSIER_PROGRESS");
        if (envProgress != null && (envProgress.equalsIgnoreCase("true") || envProgress.equalsIgnoreCase("enable") || envProgress.equalsIgnoreCase("1"))) {
            disableViaEnv = false;
        } else if (envProgress != null && (envProgress.equalsIgnoreCase("false") || envProgress.equalsIgnoreCase("disable") || envProgress.equalsIgnoreCase("0"))) {
            disableViaEnv = true;
        } else {
            disableViaEnv = System.getenv("COURSIER_NO_TERM") != null;
        }

        if (disableViaEnv)
            return false;

        return true;
    }

    public static boolean useAnsiOutput() {
        if (useAnsiOutput0 == null) {
            useAnsiOutput0 = computeUseAnsiOutput();
        }
        return useAnsiOutput0;
    }

    private static Boolean useJni0 = null;

    public static boolean useJni() {
        return useJni(() -> {});
    }

    /**
     * Whether to use the JNI-based Windows helpers (Windows only).
     *
     * <p>Controlled by the {@code COURSIER_JNI} env var / {@code coursier.jni} property
     * ({@code "force"} / {@code "false"}); otherwise decided by probing the JNI registry access once.
     * The result is cached for the lifetime of the JVM.
     *
     * @param beforeJni callback run just before any JNI call is attempted
     */
    public static boolean useJni(Runnable beforeJni) {
        if (useJni0 != null)
            return useJni0;

        boolean isWindows = System.getProperty("os.name")
                .toLowerCase(Locale.ROOT)
                .contains("windows");

        if (!isWindows) {
            useJni0 = false;
            return useJni0;
        }

        String prop = System.getenv("COURSIER_JNI");
        if (prop == null || prop.isEmpty())
            prop = System.getProperty("coursier.jni", "");

        boolean force = prop.equalsIgnoreCase("force");
        if (force) {
            beforeJni.run();
            useJni0 = true;
            return useJni0;
        }

        boolean disabled = prop.equalsIgnoreCase("false");
        if (disabled) {
            useJni0 = false;
            return useJni0;
        }

        // Try to get a dummy user env var from registry. If it fails, assume the JNI stuff is broken,
        // and fallback on PowerShell scripts.
        try {
            beforeJni.run();
            coursier.jniutils.WindowsEnvironmentVariables.get("PATH");
            useJni0 = true;
        } catch (Throwable t) {
            if (System.getProperty("coursier.jni.check.throw", "").equalsIgnoreCase("true"))
                throw new RuntimeException(t);
            useJni0 = false;
        }

        return useJni0;
    }
}
package com.mapswithme.maps.ugc.routes;

import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.ActivityCompat;
import android.support.v7.widget.DividerItemDecoration;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Pair;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.mapswithme.maps.R;
import com.mapswithme.maps.adapter.AdapterPositionConverter;
import com.mapswithme.maps.adapter.OnItemClickListener;
import com.mapswithme.maps.adapter.RecyclerCompositeAdapter;
import com.mapswithme.maps.adapter.RepeatablePairPositionConverter;
import com.mapswithme.maps.adapter.TagGroupNameAdapter;
import com.mapswithme.maps.adapter.TagsAdapter;
import com.mapswithme.maps.adapter.TagsCompositeAdapter;
import com.mapswithme.maps.base.BaseMwmFragment;
import com.mapswithme.maps.bookmarks.data.BookmarkManager;
import com.mapswithme.maps.bookmarks.data.CatalogCustomProperty;
import com.mapswithme.maps.bookmarks.data.CatalogTag;
import com.mapswithme.maps.bookmarks.data.CatalogTagsGroup;
import com.mapswithme.maps.dialog.AlertDialog;
import com.mapswithme.maps.dialog.AlertDialogCallback;
import com.mapswithme.maps.widget.recycler.ItemDecoratorFactory;
import com.mapswithme.util.UiUtils;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;

/**
 * Fragment that lets the user pick catalog tags for a UGC route.
 *
 * <p>Tags are requested from {@link BookmarkManager} on creation; while the
 * request is in flight a progress view is shown. On success the tag groups are
 * rendered in a composite {@link RecyclerView}; on failure a retryable error
 * dialog is shown. The current selection survives configuration changes via
 * the saved-instance-state bundle and is returned to the caller through the
 * activity result when "done" is pressed.
 *
 * <p>Note: lifecycle callbacks that reach the host activity now use
 * {@link #requireActivity()}/{@link #requireContext()} consistently (the class
 * already used {@code requireActivity()} in {@code installTags}), so a detached
 * fragment fails fast with {@code IllegalStateException} instead of a NPE.
 */
public class UgcRouteTagsFragment extends BaseMwmFragment implements BookmarkManager.BookmarksCatalogListener,
                                                                     OnItemClickListener<Pair<TagsAdapter, TagsAdapter.TagViewHolder>>,
                                                                     AlertDialogCallback
{
  private static final String BUNDLE_SELECTED_TAGS = "bundle_saved_tags";
  private static final String ERROR_LOADING_DIALOG_TAG = "error_loading_dialog";
  private static final int ERROR_LOADING_DIALOG_REQ_CODE = 205;

  @SuppressWarnings("NullableProblems")
  @NonNull
  private RecyclerView mRecycler;

  @SuppressWarnings("NullableProblems")
  @NonNull
  private View mProgress;

  @SuppressWarnings("NullableProblems")
  @NonNull
  private ViewGroup mTagsContainer;

  // Kept until tags arrive so the previous selection can be restored.
  @Nullable
  private Bundle mSavedInstanceState;

  // Non-null only after tags have been received and installed.
  @Nullable
  private TagsCompositeAdapter mTagsAdapter;

  @SuppressWarnings("NullableProblems")
  @NonNull
  private TextView mDescriptionView;

  @Nullable
  @Override
  public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container,
                           @Nullable Bundle savedInstanceState)
  {
    ViewGroup root = (ViewGroup) inflater.inflate(R.layout.fragment_ugc_routes, container, false);
    setHasOptionsMenu(true);
    mProgress = root.findViewById(R.id.progress_container);
    mTagsContainer = root.findViewById(R.id.tags_container);
    mDescriptionView = root.findViewById(R.id.ugc_route_tags_desc);
    initRecycler(root);
    UiUtils.hide(mTagsContainer);
    UiUtils.show(mProgress);
    BookmarkManager.INSTANCE.requestRouteTags();
    mSavedInstanceState = savedInstanceState;
    return root;
  }

  private void initRecycler(@NonNull ViewGroup root)
  {
    mRecycler = root.findViewById(R.id.recycler);
    mRecycler.setItemAnimator(null);
    RecyclerView.ItemDecoration decor = ItemDecoratorFactory.createRatingRecordDecorator(
        requireContext().getApplicationContext(),
        DividerItemDecoration.VERTICAL,
        R.drawable.divider_transparent_half_plus_eight);
    mRecycler.addItemDecoration(decor);
  }

  // Re-issues the tags request after a loading failure.
  private void onRetryClicked()
  {
    UiUtils.hide(mTagsContainer);
    UiUtils.show(mProgress);
    BookmarkManager.INSTANCE.requestRouteTags();
  }

  private void showErrorLoadingDialog()
  {
    AlertDialog dialog = new AlertDialog.Builder()
        .setTitleId(R.string.title_error_downloading_bookmarks)
        .setMessageId(R.string.tags_loading_error_subtitle)
        .setPositiveBtnId(R.string.try_again)
        .setNegativeBtnId(R.string.cancel)
        .setReqCode(ERROR_LOADING_DIALOG_REQ_CODE)
        .setFragManagerStrategyType(AlertDialog.FragManagerStrategyType.ACTIVITY_FRAGMENT_MANAGER)
        .build();
    dialog.setTargetFragment(this, ERROR_LOADING_DIALOG_REQ_CODE);
    dialog.show(this, ERROR_LOADING_DIALOG_TAG);
  }

  @Override
  public void onCreateOptionsMenu(Menu menu, MenuInflater inflater)
  {
    inflater.inflate(R.menu.menu_done, menu);
  }

  @Override
  public void onPrepareOptionsMenu(Menu menu)
  {
    super.onPrepareOptionsMenu(menu);
    MenuItem item = menu.findItem(R.id.done);
    // "Done" is only offered once at least one tag is selected.
    item.setVisible(hasSelectedItems());
  }

  private boolean hasSelectedItems()
  {
    return mTagsAdapter != null && mTagsAdapter.hasSelectedItems();
  }

  @Override
  public boolean onOptionsItemSelected(MenuItem item)
  {
    if (item.getItemId() == R.id.done)
    {
      onDoneOptionItemClicked();
      return true;
    }
    return super.onOptionsItemSelected(item);
  }

  // Returns the selected tags to the caller and finishes the host activity.
  private void onDoneOptionItemClicked()
  {
    if (mTagsAdapter == null)
      return;

    ArrayList<CatalogTag> value = new ArrayList<>(mTagsAdapter.getSelectedTags());
    Intent result = new Intent().putParcelableArrayListExtra(UgcRouteTagsActivity.EXTRA_TAGS, value);
    Activity activity = requireActivity();
    activity.setResult(Activity.RESULT_OK, result);
    activity.finish();
  }

  @Override
  public void onSaveInstanceState(Bundle outState)
  {
    super.onSaveInstanceState(outState);
    if (mTagsAdapter != null)
      outState.putParcelableArrayList(BUNDLE_SELECTED_TAGS, new ArrayList<>(mTagsAdapter.getSelectedTags()));
  }

  @Override
  public void onStart()
  {
    super.onStart();
    BookmarkManager.INSTANCE.addCatalogListener(this);
  }

  @Override
  public void onStop()
  {
    super.onStop();
    BookmarkManager.INSTANCE.removeCatalogListener(this);
  }

  @Override
  public void onImportStarted(@NonNull String serverId)
  {
    /* Do nothing by default */
  }

  @Override
  public void onImportFinished(@NonNull String serverId, long catId, boolean successful)
  {
    /* Do nothing by default */
  }

  @Override
  public void onTagsReceived(boolean successful, @NonNull List<CatalogTagsGroup> tagsGroups, int tagsLimit)
  {
    UiUtils.showIf(successful && !tagsGroups.isEmpty(), mTagsContainer);
    UiUtils.hide(mProgress);

    if (tagsGroups.isEmpty() || !successful)
    {
      showErrorLoadingDialog();
      return;
    }
    installTags(tagsGroups, tagsLimit);
  }

  @Override
  public void onCustomPropertiesReceived(boolean successful,
                                         @NonNull List<CatalogCustomProperty> properties)
  {
    /* Not ready yet */
  }

  private void installTags(@NonNull List<CatalogTagsGroup> tagsGroups, int tagsLimit)
  {
    List<CatalogTag> savedStateTags = validateSavedState(mSavedInstanceState);

    TagGroupNameAdapter categoryAdapter = new TagGroupNameAdapter(tagsGroups);
    mTagsAdapter = new TagsCompositeAdapter(requireContext(), tagsGroups, savedStateTags, this, tagsLimit);
    RecyclerCompositeAdapter compositeAdapter = makeCompositeAdapter(categoryAdapter, mTagsAdapter);
    LinearLayoutManager layoutManager = new LinearLayoutManager(requireContext(),
                                                                LinearLayoutManager.VERTICAL,
                                                                false);
    mRecycler.setLayoutManager(layoutManager);
    mRecycler.setAdapter(compositeAdapter);
    String description = getString(R.string.ugc_route_tags_desc, String.valueOf(tagsLimit));
    mDescriptionView.setText(description);
    requireActivity().invalidateOptionsMenu();
  }

  @NonNull
  private static List<CatalogTag> validateSavedState(@Nullable Bundle savedState)
  {
    List<CatalogTag> tags;
    if (savedState == null || (tags = savedState.getParcelableArrayList(BUNDLE_SELECTED_TAGS)) == null)
      return Collections.emptyList();
    return tags;
  }

  @NonNull
  private static RecyclerCompositeAdapter makeCompositeAdapter(
      @NonNull TagGroupNameAdapter categoryAdapter,
      @NonNull TagsCompositeAdapter tagsCompositeAdapter)
  {
    AdapterPositionConverter converter =
        new RepeatablePairPositionConverter(categoryAdapter, tagsCompositeAdapter);
    return new RecyclerCompositeAdapter(converter, categoryAdapter, tagsCompositeAdapter);
  }

  @Override
  public void onUploadStarted(long originCategoryId)
  {
    /* Do nothing by default */
  }

  @Override
  public void onUploadFinished(@NonNull BookmarkManager.UploadResult uploadResult,
                               @NonNull String description, long originCategoryId,
                               long resultCategoryId)
  {
    /* Do nothing by default */
  }

  @Override
  public void onItemClick(@NonNull View v, @NonNull Pair<TagsAdapter, TagsAdapter.TagViewHolder> item)
  {
    ActivityCompat.invalidateOptionsMenu(requireActivity());
    Objects.requireNonNull(mTagsAdapter);
    // Selecting a tag can change the enabled state of every other tag (the
    // selection limit), so all child adapters must be refreshed.
    for (int i = 0; i < mTagsAdapter.getItemCount(); i++)
    {
      mTagsAdapter.getItem(i).notifyDataSetChanged();
    }
  }

  @Override
  public void onAlertDialogPositiveClick(int requestCode, int which)
  {
    onRetryClicked();
  }

  @Override
  public void onAlertDialogNegativeClick(int requestCode, int which)
  {
    Activity activity = requireActivity();
    activity.setResult(Activity.RESULT_CANCELED);
    activity.finish();
  }

  @Override
  public void onAlertDialogCancel(int requestCode)
  {
    Activity activity = requireActivity();
    activity.setResult(Activity.RESULT_CANCELED);
    activity.finish();
  }
}
/*L * Copyright (c) 2006 SAIC, SAIC-F. * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/rembrandt/LICENSE.txt for details. */ package gov.nih.nci.rembrandt.queryservice.resultset.geneExpressionPlot; import gov.nih.nci.caintegrator.dto.de.DatumDE; import gov.nih.nci.caintegrator.dto.de.DiseaseNameDE; import gov.nih.nci.caintegrator.dto.de.GeneIdentifierDE; import gov.nih.nci.caintegrator.util.MathUtil; import gov.nih.nci.rembrandt.dto.lookup.DiseaseTypeLookup; import gov.nih.nci.rembrandt.dto.lookup.LookupManager; import gov.nih.nci.rembrandt.queryservice.queryprocessing.ge.GeneExpr; import gov.nih.nci.rembrandt.queryservice.queryprocessing.ge.GeneExpr.GeneExprGroup; import gov.nih.nci.rembrandt.queryservice.queryprocessing.ge.UnifiedGeneExpr.UnifiedGeneExprGroup; import gov.nih.nci.rembrandt.util.RembrandtConstants; /** * @author SahniH * Date: Nov 9, 2004 * */ /** * caIntegrator License * * Copyright 2001-2005 Science Applications International Corporation ("SAIC"). * The software subject to this notice and license includes both human readable source code form and machine readable, * binary, object code form ("the caIntegrator Software"). The caIntegrator Software was developed in conjunction with * the National Cancer Institute ("NCI") by NCI employees and employees of SAIC. * To the extent government employees are authors, any rights in such works shall be subject to Title 17 of the United States * Code, section 105. * This caIntegrator Software License (the "License") is between NCI and You. "You (or "Your") shall mean a person or an * entity, and all other entities that control, are controlled by, or are under common control with the entity. 
"Control" * for purposes of this definition means (i) the direct or indirect power to cause the direction or management of such entity, * whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) * beneficial ownership of such entity. * This License is granted provided that You agree to the conditions described below. NCI grants You a non-exclusive, * worldwide, perpetual, fully-paid-up, no-charge, irrevocable, transferable and royalty-free right and license in its rights * in the caIntegrator Software to (i) use, install, access, operate, execute, copy, modify, translate, market, publicly * display, publicly perform, and prepare derivative works of the caIntegrator Software; (ii) distribute and have distributed * to and by third parties the caIntegrator Software and any modifications and derivative works thereof; * and (iii) sublicense the foregoing rights set out in (i) and (ii) to third parties, including the right to license such * rights to further third parties. For sake of clarity, and not by way of limitation, NCI shall have no right of accounting * or right of payment from You or Your sublicensees for the rights granted under this License. This License is granted at no * charge to You. * 1. Your redistributions of the source code for the Software must retain the above copyright notice, this list of conditions * and the disclaimer and limitation of liability of Article 6, below. Your redistributions in object code form must reproduce * the above copyright notice, this list of conditions and the disclaimer of Article 6 in the documentation and/or other materials * provided with the distribution, if any. * 2. Your end-user documentation included with the redistribution, if any, must include the following acknowledgment: "This * product includes software developed by SAIC and the National Cancer Institute." 
If You do not include such end-user * documentation, You shall include this acknowledgment in the Software itself, wherever such third-party acknowledgments * normally appear. * 3. You may not use the names "The National Cancer Institute", "NCI" "Science Applications International Corporation" and * "SAIC" to endorse or promote products derived from this Software. This License does not authorize You to use any * trademarks, service marks, trade names, logos or product names of either NCI or SAIC, except as required to comply with * the terms of this License. * 4. For sake of clarity, and not by way of limitation, You may incorporate this Software into Your proprietary programs and * into any third party proprietary programs. However, if You incorporate the Software into third party proprietary * programs, You agree that You are solely responsible for obtaining any permission from such third parties required to * incorporate the Software into such third party proprietary programs and for informing Your sublicensees, including * without limitation Your end-users, of their obligation to secure any required permissions from such third parties * before incorporating the Software into such third party proprietary software programs. In the event that You fail * to obtain such permissions, You agree to indemnify NCI for any claims against NCI by such third parties, except to * the extent prohibited by law, resulting from Your failure to obtain such permissions. * 5. For sake of clarity, and not by way of limitation, You may add Your own copyright statement to Your modifications and * to the derivative works, and You may provide additional or different license terms and conditions in Your sublicenses * of modifications of the Software, or any derivative works of the Software as a whole, provided Your use, reproduction, * and distribution of the Work otherwise complies with the conditions stated in this License. * 6. 
THIS SOFTWARE IS PROVIDED "AS IS," AND ANY EXPRESSED OR IMPLIED WARRANTIES, (INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY, NON-INFRINGEMENT AND FITNESS FOR A PARTICULAR PURPOSE) ARE DISCLAIMED.
 * IN NO EVENT SHALL THE NATIONAL CANCER INSTITUTE, SAIC, OR THEIR AFFILIATES BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
 * GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
 * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
 * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 */
/**
 * Assembles {@link GeneExprDiseasePlotContainer} result sets from disease-grouped
 * gene-expression records (both probeset-based and unified-gene-based views).
 */
public class GeneExprDiseasePlotHandler {

    /**
     * Assembles a GeneExprPlot resultset from a disease-grouped resultset.
     *
     * @param geneExprDiseasePlotContainer container to populate; returned unchanged when null
     * @param exprObj                      disease-grouped expression record; ignored when null
     * @return the (possibly updated) container
     * @throws Exception propagated from resultset assembly
     */
    public static GeneExprDiseasePlotContainer handleGeneExprDiseaseView(GeneExprDiseasePlotContainer geneExprDiseasePlotContainer, GeneExpr.GeneExprGroup exprObj) throws Exception {
        if (geneExprDiseasePlotContainer != null && exprObj != null) {
            geneExprDiseasePlotContainer.setGeneSymbol(new GeneIdentifierDE.GeneSymbol(exprObj.getGeneSymbol()));
            DiseaseGeneExprPlotResultset diseaseResultset =
                    geneExprDiseasePlotContainer.getDiseaseGeneExprPlotResultset(exprObj.getDiseaseType());
            ReporterFoldChangeValuesResultset reporterResultset =
                    handleReporterFoldChangeValuesResultset(diseaseResultset, exprObj);
            diseaseResultset.addReporterFoldChangeValuesResultset(reporterResultset);
            geneExprDiseasePlotContainer.addDiseaseGeneExprPlotResultset(diseaseResultset);
        }
        return geneExprDiseasePlotContainer;
    }

    /**
     * Assembles a GeneExprPlot resultset from a unified (cross-platform) disease-grouped resultset.
     *
     * @param geneExprDiseasePlotContainer container to populate; returned unchanged when null
     * @param exprObj                      unified disease-grouped expression record; ignored when null
     * @return the (possibly updated) container
     * @throws Exception propagated from resultset assembly
     */
    public static GeneExprDiseasePlotContainer handleUnifiedGeneExprDiseaseView(GeneExprDiseasePlotContainer geneExprDiseasePlotContainer, UnifiedGeneExprGroup exprObj) throws Exception {
        if (geneExprDiseasePlotContainer != null && exprObj != null) {
            geneExprDiseasePlotContainer.setGeneSymbol(new GeneIdentifierDE.GeneSymbol(exprObj.getGeneSymbol()));
            DiseaseGeneExprPlotResultset diseaseResultset =
                    geneExprDiseasePlotContainer.getDiseaseGeneExprPlotResultset(exprObj.getDiseaseType());
            ReporterFoldChangeValuesResultset reporterResultset =
                    handleReporterFoldChangeValuesResultset(diseaseResultset, exprObj);
            diseaseResultset.addReporterFoldChangeValuesResultset(reporterResultset);
            geneExprDiseasePlotContainer.addDiseaseGeneExprPlotResultset(diseaseResultset);
        }
        return geneExprDiseasePlotContainer;
    }

    /**
     * Builds (or updates) the reporter-level fold-change resultset for a probeset-based record.
     *
     * @param diseaseResultset disease bucket to look up an existing reporter resultset in
     * @param exprObj          expression record; only Affymetrix probesets are handled for now
     * @return the populated reporter resultset, or null when inputs are null or no probeset is present
     */
    private static ReporterFoldChangeValuesResultset handleReporterFoldChangeValuesResultset(DiseaseGeneExprPlotResultset diseaseResultset, GeneExprGroup exprObj) {
        // Find out if the record carries a probeset (clone support TBD) and
        // populate the reporter resultset with the appropriate identifier.
        ReporterFoldChangeValuesResultset reporterResultset = null;
        if (diseaseResultset != null && exprObj != null) {
            // TODO: only Affy probesets for now
            if (exprObj.getProbesetName() != null) {
                DatumDE reporter = new DatumDE(DatumDE.PROBESET_ID, exprObj.getProbesetName());
                reporterResultset = diseaseResultset.getReporterFoldChangeValuesResultset(exprObj.getProbesetName().toString());
                if (reporterResultset == null) {
                    reporterResultset = new ReporterFoldChangeValuesResultset(reporter);
                }
                reporterResultset.setRatioPval(new DatumDE(DatumDE.FOLD_CHANGE_RATIO_PVAL, exprObj.getRatioPval()));
                reporterResultset.setFoldChangeIntensity(new DatumDE(DatumDE.FOLD_CHANGE_SAMPLE_INTENSITY, exprObj.getSampleIntensity()));
                reporterResultset.setFoldChangeLog2Intensity(new DatumDE(DatumDE.FOLD_CHANGE_LOG2_INTENSITY, MathUtil.getLog2(exprObj.getSampleIntensity())));
                reporterResultset.setStandardDeviationRatio(new DatumDE(DatumDE.STD_DEVIATION_RATIO, exprObj.getStandardDeviationRatio()));
            }
        }
        return reporterResultset;
    }

    /**
     * Builds (or updates) the reporter-level fold-change resultset for a unified-gene record.
     *
     * @param diseaseResultset disease bucket to look up an existing reporter resultset in
     * @param exprObj          unified expression record
     * @return the populated reporter resultset, or null when inputs are null or no unified gene id is present
     */
    private static ReporterFoldChangeValuesResultset handleReporterFoldChangeValuesResultset(DiseaseGeneExprPlotResultset diseaseResultset, UnifiedGeneExprGroup exprObj) {
        ReporterFoldChangeValuesResultset reporterResultset = null;
        if (diseaseResultset != null && exprObj != null) {
            // TODO: only Affy probesets for now
            if (exprObj.getUnifiedGeneID() != null) {
                DatumDE reporter = new DatumDE(DatumDE.UNIFIED_GENE_ID, exprObj.getUnifiedGeneID());
                reporterResultset = diseaseResultset.getReporterFoldChangeValuesResultset(exprObj.getUnifiedGeneID());
                if (reporterResultset == null) {
                    reporterResultset = new ReporterFoldChangeValuesResultset(reporter);
                }
                reporterResultset.setRatioPval(new DatumDE(DatumDE.FOLD_CHANGE_RATIO_PVAL, exprObj.getRatioPval()));
                reporterResultset.setFoldChangeIntensity(new DatumDE(DatumDE.FOLD_CHANGE_SAMPLE_INTENSITY, exprObj.getSampleIntensity()));
                // NOTE(review): the intensity above uses getSampleIntensity() but the log2 value below
                // uses getNormalIntensity(); the probeset overload uses getSampleIntensity() for both.
                // Preserved as-is — confirm whether this asymmetry is intentional.
                reporterResultset.setFoldChangeLog2Intensity(new DatumDE(DatumDE.FOLD_CHANGE_LOG2_INTENSITY, MathUtil.getLog2(exprObj.getNormalIntensity())));
            }
        }
        return reporterResultset;
    }

    /**
     * Records the NON_TUMOR ("normal") baseline intensities as if they were a disease bucket,
     * so normals plot alongside real diseases.
     *
     * <p>Note: the method name's "Noraml" typo is kept for caller compatibility.
     *
     * @param geneExprDiseasePlotContainer container to populate; returned unchanged when null
     * @param exprObj                      probeset-based expression record; ignored when null
     * @return the (possibly updated) container
     */
    public static GeneExprDiseasePlotContainer handleNoramlAsDisease(GeneExprDiseasePlotContainer geneExprDiseasePlotContainer, GeneExprGroup exprObj) {
        if (geneExprDiseasePlotContainer != null && exprObj != null) {
            // TODO: only Affy probesets for now
            if (exprObj.getProbesetName() != null) {
                DiseaseGeneExprPlotResultset non_tumor =
                        geneExprDiseasePlotContainer.getDiseaseGeneExprPlotResultset(RembrandtConstants.NON_TUMOR);
                DatumDE reporter = new DatumDE(DatumDE.PROBESET_ID, exprObj.getProbesetName());
                ReporterFoldChangeValuesResultset reporterResultset =
                        non_tumor.getReporterFoldChangeValuesResultset(exprObj.getProbesetName().toString());
                if (reporterResultset == null) {
                    reporterResultset = new ReporterFoldChangeValuesResultset(reporter);
                }
                // Normals have no fold-change ratio; a zero p-value placeholder is stored.
                // TODO: should be changed to a special sentinel value.
                reporterResultset.setRatioPval(new DatumDE(DatumDE.FOLD_CHANGE_RATIO_PVAL, Double.valueOf(0.0)));
                reporterResultset.setFoldChangeIntensity(new DatumDE(DatumDE.FOLD_CHANGE_SAMPLE_INTENSITY, exprObj.getNormalIntensity()));
                reporterResultset.setStandardDeviationRatio(new DatumDE(DatumDE.STD_DEVIATION_RATIO, exprObj.getStandardDeviationRatio()));
                reporterResultset.setFoldChangeLog2Intensity(new DatumDE(DatumDE.FOLD_CHANGE_LOG2_INTENSITY, MathUtil.getLog2(exprObj.getNormalIntensity())));
                // Bug fix: these two calls used to run even when no probeset was present,
                // dereferencing a null non_tumor. They now run only inside this branch.
                geneExprDiseasePlotContainer.addDiseaseGeneExprPlotResultset(non_tumor);
                non_tumor.addReporterFoldChangeValuesResultset(reporterResultset);
            }
        }
        return geneExprDiseasePlotContainer;
    }

    /**
     * Pre-populates the container with one empty {@link DiseaseGeneExprPlotResultset}
     * per known disease type from the lookup service.
     *
     * @param geneExprDiseasePlotContainer container to seed
     * @return the seeded container
     * @throws Exception propagated from {@link LookupManager#getDiseaseType()}
     */
    public static GeneExprDiseasePlotContainer handleDiseaseGeneExprPlotResultset(GeneExprDiseasePlotContainer geneExprDiseasePlotContainer) throws Exception {
        DiseaseTypeLookup[] diseaseTypes = LookupManager.getDiseaseType();
        for (int i = 0; i < diseaseTypes.length; i++) {
            DiseaseNameDE disease = new DiseaseNameDE(diseaseTypes[i].getDiseaseType().toString());
            DiseaseGeneExprPlotResultset diseaseResultset = new DiseaseGeneExprPlotResultset(disease);
            geneExprDiseasePlotContainer.addDiseaseGeneExprPlotResultset(diseaseResultset);
        }
        return geneExprDiseasePlotContainer;
    }

    /**
     * Unified-gene variant of {@link #handleNoramlAsDisease(GeneExprDiseasePlotContainer, GeneExprGroup)}.
     *
     * @param geneExprDiseasePlotContainer container to populate; returned unchanged when null
     * @param exprObj                      unified expression record; ignored when null
     * @return the (possibly updated) container
     */
    public static GeneExprDiseasePlotContainer handleNoramlAsDisease(GeneExprDiseasePlotContainer geneExprDiseasePlotContainer, UnifiedGeneExprGroup exprObj) {
        if (geneExprDiseasePlotContainer != null && exprObj != null) {
            // TODO: only Affy probesets for now
            if (exprObj.getUnifiedGeneID() != null) {
                DiseaseGeneExprPlotResultset non_tumor =
                        geneExprDiseasePlotContainer.getDiseaseGeneExprPlotResultset(RembrandtConstants.NON_TUMOR);
                DatumDE reporter = new DatumDE(DatumDE.UNIFIED_GENE_ID, exprObj.getUnifiedGeneID());
                ReporterFoldChangeValuesResultset reporterResultset =
                        non_tumor.getReporterFoldChangeValuesResultset(exprObj.getUnifiedGeneID().toString());
                if (reporterResultset == null) {
                    reporterResultset = new ReporterFoldChangeValuesResultset(reporter);
                }
                // Normals have no fold-change ratio; a zero p-value placeholder is stored.
                // TODO: should be changed to a special sentinel value.
                reporterResultset.setRatioPval(new DatumDE(DatumDE.FOLD_CHANGE_RATIO_PVAL, Double.valueOf(0.0)));
                reporterResultset.setFoldChangeIntensity(new DatumDE(DatumDE.FOLD_CHANGE_SAMPLE_INTENSITY, exprObj.getNormalIntensity()));
                reporterResultset.setFoldChangeLog2Intensity(new DatumDE(DatumDE.FOLD_CHANGE_LOG2_INTENSITY, MathUtil.getLog2(exprObj.getNormalIntensity())));
                // Bug fix: these two calls used to run even when no unified gene id was present,
                // dereferencing a null non_tumor. They now run only inside this branch.
                geneExprDiseasePlotContainer.addDiseaseGeneExprPlotResultset(non_tumor);
                non_tumor.addReporterFoldChangeValuesResultset(reporterResultset);
            }
        }
        return geneExprDiseasePlotContainer;
    }
}
/*
 * Copyright 2011 Robert Theis
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ch.luklanis.esscan;

import android.app.ProgressDialog;
import android.os.AsyncTask;
import android.util.Log;

import com.googlecode.tesseract.android.TessBaseAPI;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

/**
 * Installs the language data required for OCR, and initializes the OCR engine
 * using a background thread.
 */
public final class OcrInitAsyncTask extends AsyncTask<String, String, Boolean> {
    private static final String TAG = OcrInitAsyncTask.class.getSimpleName();

    private IBase base;
    private TessBaseAPI baseApi;
    private ProgressDialog dialog;
    private final String languageCode;
    private String languageName;
    private int ocrEngineMode;

    /**
     * AsyncTask to asynchronously install language data and initialize Tesseract.
     *
     * @param base          The calling activity
     * @param baseApi       API to the OCR engine
     * @param dialog        Dialog box with thermometer progress indicator (may be null)
     * @param languageCode  ISO 639-2 OCR language code
     * @param languageName  Name of the OCR language, for example, "English"
     * @param ocrEngineMode Whether to use Tesseract, Cube, or both
     */
    public OcrInitAsyncTask(IBase base, TessBaseAPI baseApi, ProgressDialog dialog,
                            String languageCode, String languageName, int ocrEngineMode) {
        this.base = base;
        this.baseApi = baseApi;
        this.dialog = dialog;
        this.languageCode = languageCode;
        this.languageName = languageName;
        this.ocrEngineMode = ocrEngineMode;
    }

    @Override
    protected void onPreExecute() {
        super.onPreExecute();
        if (dialog == null) {
            return;
        }
        dialog.setTitle("Please wait");
        dialog.setMessage("Checking for data installation...");
        dialog.setIndeterminate(false);
        dialog.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL);
        dialog.setCancelable(false);
        dialog.show();
    }

    /**
     * In background thread, perform required setup, and request initialization
     * of the OCR engine.
     *
     * @param params [0] Pathname for the directory for storing language data files
     *               to the SD card
     */
    protected Boolean doInBackground(String... params) {
        // Example Tesseract data filename: "eng.traineddata"
        String destinationFilenameBase = languageCode + ".traineddata";

        // Check for, and create if necessary, the "tessdata" folder holding model data
        String destinationDirBase = params[0];
        File tessdataDir = new File(destinationDirBase + File.separator + "tessdata");
        if (!tessdataDir.exists() && !tessdataDir.mkdirs()) {
            Log.e(TAG, "Couldn't make directory " + tessdataDir);
            return false;
        }

        // Reference to the installed language data file
        File destinationFile = new File(tessdataDir, destinationFilenameBase);

        // If language data files are not present, install them from assets
        boolean installSuccess = false;
        if (!destinationFile.exists()) {
            Log.d(TAG, "Language data for " + languageCode + " not found in " + tessdataDir.toString());
            try {
                Log.d(TAG, "Checking for language data (" + destinationFilenameBase
                        + ".zip) in application assets...");
                // Check for a file like "eng.traineddata.zip"
                installSuccess = installFromAssets(destinationFilenameBase + ".zip", tessdataDir);
            } catch (IOException e) {
                Log.e(TAG, "IOException", e);
            } catch (Exception e) {
                Log.e(TAG, "Got exception", e);
            }
        } else {
            Log.d(TAG, "Language data for " + languageCode + " already installed in "
                    + tessdataDir.toString());
            installSuccess = true;
        }

        // Initialize the OCR engine; succeed only if both install and init worked
        return baseApi.init(destinationDirBase + File.separator, languageCode, ocrEngineMode)
                && installSuccess;
    }

    /**
     * Install a file from application assets to device external storage.
     *
     * @param sourceFilename File in assets to install
     * @param modelRoot      Directory on SD card to install the file to
     * @return True if installZipFromAssets returns true
     * @throws IOException if reading assets or writing the destination fails
     */
    private boolean installFromAssets(String sourceFilename, File modelRoot) throws IOException {
        String extension = sourceFilename.substring(sourceFilename.lastIndexOf('.'));
        try {
            if (extension.equals(".zip")) {
                return installZipFromAssets(sourceFilename, modelRoot);
            } else {
                throw new IllegalArgumentException("Extension " + extension + " is unsupported.");
            }
        } catch (FileNotFoundException e) {
            Log.d(TAG, "Language not packaged in application assets.");
        }
        return false;
    }

    /**
     * Unzip the given Zip file, located in application assets, into the given
     * destination directory.
     *
     * @param sourceFilename Name of the file in assets
     * @param destinationDir Directory to save the unzipped files in
     * @return true when all entries were extracted
     * @throws IOException           if reading or writing fails
     * @throws FileNotFoundException if the asset does not exist
     */
    private boolean installZipFromAssets(String sourceFilename, File destinationDir)
            throws IOException, FileNotFoundException {
        publishProgress("Uncompressing data for " + languageName + "...", "0");
        ZipInputStream inputStream = new ZipInputStream(base.getContext()
                .getAssets()
                .open("tessdata/" + sourceFilename));
        // Bug fix: streams are now closed in finally blocks so they are not
        // leaked when an IOException is thrown mid-extraction.
        try {
            // Loop through all the files and folders in the zip archive (but there
            // should just be one)
            for (ZipEntry entry = inputStream.getNextEntry(); entry != null;
                    entry = inputStream.getNextEntry()) {
                File destinationFile = new File(destinationDir, entry.getName());

                if (entry.isDirectory()) {
                    destinationFile.mkdirs();
                } else {
                    // Note getSize() returns -1 when the zipfile does not have the size set
                    long zippedFileSize = entry.getSize();

                    final int BUFFER = 8192;
                    BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(
                            new FileOutputStream(destinationFile), BUFFER);
                    try {
                        long unzippedSize = 0;
                        int percentCompleteLast = 0;
                        int count;
                        byte[] data = new byte[BUFFER];
                        while ((count = inputStream.read(data, 0, BUFFER)) != -1) {
                            bufferedOutputStream.write(data, 0, count);
                            unzippedSize += count;
                            // Bug fix: the old expression did the integer division first
                            // ((unzippedSize / zippedFileSize) * 100), which is 0 until the
                            // file is fully read. Multiply first, and skip when size is unknown.
                            if (zippedFileSize > 0) {
                                int percentComplete = (int) (unzippedSize * 100 / zippedFileSize);
                                if (percentComplete > percentCompleteLast) {
                                    publishProgress("Uncompressing data for " + languageName + "...",
                                            Integer.toString(percentComplete), "0");
                                    percentCompleteLast = percentComplete;
                                }
                            }
                        }
                    } finally {
                        bufferedOutputStream.close();
                    }
                }
                inputStream.closeEntry();
            }
        } finally {
            inputStream.close();
        }
        return true;
    }

    /**
     * Update the dialog box with the latest incremental progress.
     *
     * @param message [0] Text to be displayed
     *                [1] Numeric value for the progress
     */
    @Override
    protected void onProgressUpdate(String... message) {
        super.onProgressUpdate(message);
        int percentComplete = Integer.parseInt(message[1]);
        if (dialog == null) {
            return;
        }
        dialog.setMessage(message[0]);
        dialog.setProgress(percentComplete);
        dialog.show();
    }

    @Override
    protected void onPostExecute(Boolean result) {
        super.onPostExecute(result);
        if (dialog != null) {
            dialog.dismiss();
        }

        if (result) {
            base.setBaseApi(baseApi);
            // Restart recognition
            base.resumeOcrEngine();
        } else {
            base.showErrorMessage("Error",
                    "Network is unreachable - cannot download language data. "
                            + "Please enable network access and restart this app.");
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.samza.zk; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; import org.I0Itec.zkclient.ZkClient; import org.apache.samza.Partition; import org.apache.samza.SamzaException; import org.apache.samza.config.Config; import org.apache.samza.config.MapConfig; import org.apache.samza.config.ZkConfig; import org.apache.samza.container.TaskName; import org.apache.samza.coordinator.JobCoordinatorListener; import org.apache.samza.coordinator.MetadataResourceUtil; import org.apache.samza.coordinator.StreamPartitionCountMonitor; import org.apache.samza.coordinator.metadatastore.CoordinatorStreamStore; import org.apache.samza.job.model.ContainerModel; import org.apache.samza.job.model.JobModel; import org.apache.samza.job.model.TaskModel; import org.apache.samza.metadatastore.MetadataStore; import org.apache.samza.metrics.MetricsRegistryMap; import org.apache.samza.startpoint.StartpointManager; import 
org.apache.samza.system.SystemStreamPartition; import org.apache.samza.util.NoOpMetricsRegistry; import org.apache.samza.zk.ZkJobCoordinator.ZkSessionStateChangedListener; import org.apache.zookeeper.Watcher; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.any; import static org.mockito.Mockito.anyLong; import static org.mockito.Mockito.anyObject; import static org.mockito.Mockito.anyString; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.verifyZeroInteractions; import static org.mockito.Mockito.when; public class TestZkJobCoordinator { private static final String LATEST_JOB_MODEL_VERSION = "2"; private static final String PROCESSOR_ID = "testProcessor"; private static final String TEST_BARRIER_ROOT = "/testBarrierRoot"; private static final String TEST_JOB_MODEL_VERSION = "1"; private final Config config; private final JobModel jobModel; private final MetadataStore zkMetadataStore; private final CoordinatorStreamStore coordinatorStreamStore; private ZkUtils zkUtils; @Before public void setup() { ZkKeyBuilder keyBuilder = Mockito.mock(ZkKeyBuilder.class); ZkClient mockZkClient = Mockito.mock(ZkClient.class); when(keyBuilder.getJobModelVersionBarrierPrefix()).thenReturn(TEST_BARRIER_ROOT); zkUtils = Mockito.mock(ZkUtils.class); when(zkUtils.getKeyBuilder()).thenReturn(keyBuilder); when(zkUtils.getZkClient()).thenReturn(mockZkClient); } public TestZkJobCoordinator() { 
Map<String, String> configMap = ImmutableMap.of( "job.coordinator.system", "kafka", "job.name", "test-job", "systems.kafka.samza.factory", "org.apache.samza.system.MockSystemFactory", ZkConfig.STARTUP_WITH_ACTIVE_JOB_MODEL, "true"); config = new MapConfig(configMap); Set<SystemStreamPartition> ssps = ImmutableSet.of( new SystemStreamPartition("system1", "stream1_1", new Partition(0)), new SystemStreamPartition("system1", "stream1_2", new Partition(0))); Map<TaskName, TaskModel> tasksForContainer = ImmutableMap.of( new TaskName("t1"), new TaskModel(new TaskName("t1"), ssps, new Partition(0))); ContainerModel containerModel = new ContainerModel("0", tasksForContainer); jobModel = new JobModel(config, ImmutableMap.of("0", containerModel)); zkMetadataStore = Mockito.mock(MetadataStore.class); coordinatorStreamStore = Mockito.mock(CoordinatorStreamStore.class); } @Test public void testCheckAndExpireWithMultipleRebalances() { final TaskName taskName = new TaskName("task1"); final ContainerModel mockContainerModel = mock(ContainerModel.class); final JobCoordinatorListener mockListener = mock(JobCoordinatorListener.class); final JobModel jobModelVersion1 = mock(JobModel.class); final JobModel jobModelVersion2 = mock(JobModel.class); final JobModel jobModelVersion3 = jobModelVersion1; when(mockContainerModel.getTasks()).thenReturn(ImmutableMap.of(taskName, mock(TaskModel.class))); when(jobModelVersion3.getContainers()).thenReturn(ImmutableMap.of(PROCESSOR_ID, mockContainerModel)); ZkJobCoordinator zkJobCoordinator = new ZkJobCoordinator(PROCESSOR_ID, new MapConfig(), new NoOpMetricsRegistry(), zkUtils, zkMetadataStore, coordinatorStreamStore); zkJobCoordinator.setListener(mockListener); zkJobCoordinator.setActiveJobModel(jobModelVersion1); /* * The following mimics the scenario where new work assignment(V2) is proposed by the leader and the work assignment * differs from the active work assignment(V1) and hence results in job model expiration */ 
zkJobCoordinator.checkAndExpireJobModel(jobModelVersion2); verify(mockListener, times(1)).onJobModelExpired(); assertTrue("JobModelExpired should be true for work assignment changes", zkJobCoordinator.getJobModelExpired()); assertEquals("Active job model shouldn't be updated", jobModelVersion1, zkJobCoordinator.getActiveJobModel()); /* * The following mimics the scenario where leader kicked off another rebalance where the new work assignment(V3) * is same as the old work assignment(V1) and doesn't trigger job model expiration. We check the interactions w/ * the listener to ensure job model expiration isn't invoked. However, the previous rebalance should have already * triggered job model expiration and set the job model expired flag to true */ zkJobCoordinator.checkAndExpireJobModel(jobModelVersion1); verifyNoMoreInteractions(mockListener); assertTrue("JobModelExpired should remain unchanged", zkJobCoordinator.getJobModelExpired()); assertEquals("Active job model shouldn't be updated", jobModelVersion1, zkJobCoordinator.getActiveJobModel()); /* * The following mimics the scenario where the new work assignment(V3) proposed by the leader is accepted and * on new job model is invoked. 
Even though the work assignment remains the same w/ the active job model version, * onNewJobModel is invoked on the listener as an intermediate rebalance expired the old work assignment(V1) */ zkJobCoordinator.onNewJobModel(jobModelVersion3); verify(mockListener, times(1)).onNewJobModel(PROCESSOR_ID, jobModelVersion3); verify(zkUtils, times(1)).writeTaskLocality(any(), any()); assertEquals("Active job model should be updated to new job model", zkJobCoordinator.getActiveJobModel(), jobModelVersion3); assertFalse("JobModelExpired should be set to false after onNewJobModel", zkJobCoordinator.getJobModelExpired()); } @Test public void testCheckAndExpireWithNoChangeInWorkAssignment() { BiConsumer<ZkUtils, JobCoordinatorListener> verificationMethod = (ignored, coordinatorListener) -> verifyZeroInteractions(coordinatorListener); testNoChangesInWorkAssignmentHelper(ZkJobCoordinator::checkAndExpireJobModel, verificationMethod); } @Test public void testCheckAndExpireWithChangeInWorkAssignment() { final String processorId = "testProcessor"; JobCoordinatorListener mockListener = mock(JobCoordinatorListener.class); ZkJobCoordinator zkJobCoordinator = new ZkJobCoordinator(processorId, new MapConfig(), new NoOpMetricsRegistry(), zkUtils, zkMetadataStore, coordinatorStreamStore); zkJobCoordinator.setListener(mockListener); zkJobCoordinator.checkAndExpireJobModel(mock(JobModel.class)); verify(mockListener, times(1)).onJobModelExpired(); } @Test(expected = NullPointerException.class) public void testCheckAndExpireJobModelWithNullJobModel() { final String processorId = "testProcessor"; ZkJobCoordinator zkJobCoordinator = new ZkJobCoordinator(processorId, new MapConfig(), new NoOpMetricsRegistry(), zkUtils, zkMetadataStore, coordinatorStreamStore); zkJobCoordinator.checkAndExpireJobModel(null); } @Test public void testOnNewJobModelWithChangeInWorkAssignment() { final TaskName taskName = new TaskName("task1"); final ContainerModel mockContainerModel = mock(ContainerModel.class); final 
JobCoordinatorListener mockListener = mock(JobCoordinatorListener.class);
    final JobModel mockJobModel = mock(JobModel.class);
    // Wire the mocked job model so it maps PROCESSOR_ID to a container with one task.
    when(mockContainerModel.getTasks()).thenReturn(ImmutableMap.of(taskName, mock(TaskModel.class)));
    when(mockJobModel.getContainers()).thenReturn(ImmutableMap.of(PROCESSOR_ID, mockContainerModel));
    ZkJobCoordinator zkJobCoordinator = new ZkJobCoordinator(PROCESSOR_ID, new MapConfig(), new NoOpMetricsRegistry(), zkUtils, zkMetadataStore, coordinatorStreamStore);
    zkJobCoordinator.setListener(mockListener);
    zkJobCoordinator.setJobModelExpired(true);
    zkJobCoordinator.onNewJobModel(mockJobModel);
    // Accepting a new job model should persist task locality and notify the listener exactly once.
    verify(zkUtils, times(1)).writeTaskLocality(eq(taskName), any());
    verify(mockListener, times(1)).onNewJobModel(PROCESSOR_ID, mockJobModel);
    assertEquals("Active job model should be updated with the new job model", mockJobModel, zkJobCoordinator.getActiveJobModel());
  }

  /**
   * When the proposed job model equals the active one, no locality should be written and the
   * listener should not be invoked at all.
   */
  @Test
  public void testOnNewJobModelWithNoChangesInWorkAssignment() {
    BiConsumer<ZkUtils, JobCoordinatorListener> verificationMethod = (zkUtils, coordinatorListener) -> {
      verify(zkUtils, times(0)).writeTaskLocality(any(), any());
      verifyZeroInteractions(coordinatorListener);
    };
    testNoChangesInWorkAssignmentHelper(ZkJobCoordinator::onNewJobModel, verificationMethod);
  }

  /** onNewJobModel(null) is expected to fail fast with a NullPointerException. */
  @Test(expected = NullPointerException.class)
  public void testOnNewJobModelWithNullJobModel() {
    ZkJobCoordinator zkJobCoordinator = new ZkJobCoordinator(PROCESSOR_ID, new MapConfig(), new NoOpMetricsRegistry(), zkUtils, zkMetadataStore, coordinatorStreamStore);
    zkJobCoordinator.onNewJobModel(null);
  }

  /**
   * Tests a job model version change that changes the work assignment. In this scenario, existing
   * work should be stopped, i.e. the processor should stop the container through the listener. The
   * processor then proceeds to join the barrier to notify its acceptance of the proposed job model.
   */
  @Test
  public void testJobModelVersionChangeWithChangeInWorkAssignment() throws Exception {
    BiConsumer<ZkBarrierForVersionUpgrade, JobCoordinatorListener> verificationMethod = (barrier, listener) -> {
      verify(listener, times(1)).onJobModelExpired();
      verify(barrier, times(1)).join(TEST_JOB_MODEL_VERSION, PROCESSOR_ID);
    };
    // Active job model is null, so any non-null new model counts as a work-assignment change.
    testJobModelVersionChangeHelper(null, mock(JobModel.class), verificationMethod);
  }

  /**
   * Tests a job model version change without any changes to the work assignment. In this scenario,
   * existing work should not be stopped, i.e. the processor shouldn't stop the container. However,
   * the processor still proceeds to join the barrier to notify its acceptance of the proposed
   * job model.
   */
  @Test
  public void testJobModelVersionChangeWithNoChangeInWorkAssignment() throws Exception {
    final JobModel jobModel = mock(JobModel.class);
    BiConsumer<ZkBarrierForVersionUpgrade, JobCoordinatorListener> verificationMethod = (barrier, listener) -> {
      verifyZeroInteractions(listener);
      verify(barrier, times(1)).join(TEST_JOB_MODEL_VERSION, PROCESSOR_ID);
    };
    // Same instance for active and new model: no change in work assignment.
    testJobModelVersionChangeHelper(jobModel, jobModel, verificationMethod);
  }

  /**
   * A ZK session expiration should cancel every action buffered in the debounce queue, bump the
   * generation, and schedule the session-expired handler immediately (0ms delay).
   */
  @Test
  public void testShouldRemoveBufferedEventsInDebounceQueueOnSessionExpiration() {
    when(zkUtils.getJobModel(TEST_JOB_MODEL_VERSION)).thenReturn(new JobModel(new MapConfig(), new HashMap<>()));
    ScheduleAfterDebounceTime mockDebounceTimer = Mockito.mock(ScheduleAfterDebounceTime.class);
    ZkJobCoordinator zkJobCoordinator = Mockito.spy(new ZkJobCoordinator(PROCESSOR_ID, new MapConfig(), new NoOpMetricsRegistry(), zkUtils, zkMetadataStore, coordinatorStreamStore));
    zkJobCoordinator.debounceTimer = mockDebounceTimer;
    zkJobCoordinator.zkSessionMetrics = new ZkSessionMetrics(new MetricsRegistryMap());
    final ZkSessionStateChangedListener zkSessionStateChangedListener = zkJobCoordinator.new ZkSessionStateChangedListener();
    zkSessionStateChangedListener.handleStateChanged(Watcher.Event.KeeperState.Expired);
    verify(zkUtils).incGeneration();
    verify(mockDebounceTimer).cancelAllScheduledActions();
    verify(mockDebounceTimer).scheduleAfterDebounceTime(eq("ZK_SESSION_EXPIRED"), eq(0L), Mockito.any(Runnable.class));
    Assert.assertEquals(1, zkJobCoordinator.zkSessionMetrics.zkSessionExpirations.getCount());
  }

  /**
   * Drives the ZK session listener through disconnect, reconnect, auth-failure and an
   * establishment error, and checks each counter in ZkSessionMetrics is incremented accordingly.
   */
  @Test
  public void testZookeeperSessionMetricsAreUpdatedCorrectly() {
    when(zkUtils.getJobModel(TEST_JOB_MODEL_VERSION)).thenReturn(new JobModel(new MapConfig(), new HashMap<>()));
    ScheduleAfterDebounceTime mockDebounceTimer = Mockito.mock(ScheduleAfterDebounceTime.class);
    ZkJobCoordinator zkJobCoordinator = Mockito.spy(new ZkJobCoordinator(PROCESSOR_ID, new MapConfig(), new NoOpMetricsRegistry(), zkUtils, zkMetadataStore, coordinatorStreamStore));
    zkJobCoordinator.debounceTimer = mockDebounceTimer;
    zkJobCoordinator.zkSessionMetrics = new ZkSessionMetrics(new MetricsRegistryMap());
    final ZkSessionStateChangedListener zkSessionStateChangedListener = zkJobCoordinator.new ZkSessionStateChangedListener();
    zkSessionStateChangedListener.handleStateChanged(Watcher.Event.KeeperState.Disconnected);
    zkSessionStateChangedListener.handleStateChanged(Watcher.Event.KeeperState.SyncConnected);
    zkSessionStateChangedListener.handleStateChanged(Watcher.Event.KeeperState.AuthFailed);
    // AuthFailed counts as a session error.
    Assert.assertEquals(1, zkJobCoordinator.zkSessionMetrics.zkSessionErrors.getCount());
    zkSessionStateChangedListener.handleSessionEstablishmentError(new SamzaException("Test exception"));
    Assert.assertEquals(1, zkJobCoordinator.zkSessionMetrics.zkSessionDisconnects.getCount());
    Assert.assertEquals(1, zkJobCoordinator.zkSessionMetrics.zkSyncConnected.getCount());
    // Establishment error bumps the error counter a second time.
    Assert.assertEquals(2, zkJobCoordinator.zkSessionMetrics.zkSessionErrors.getCount());
  }

  /** Session expiration must stop the stream partition count monitor. */
  @Test
  public void testShouldStopPartitionCountMonitorOnSessionExpiration() {
    when(zkUtils.getJobModel(TEST_JOB_MODEL_VERSION)).thenReturn(new JobModel(new MapConfig(), new HashMap<>()));
    ScheduleAfterDebounceTime mockDebounceTimer = Mockito.mock(ScheduleAfterDebounceTime.class);
    ZkJobCoordinator zkJobCoordinator = Mockito.spy(new ZkJobCoordinator(PROCESSOR_ID, new MapConfig(), new NoOpMetricsRegistry(), zkUtils, zkMetadataStore, coordinatorStreamStore));
    StreamPartitionCountMonitor monitor = Mockito.mock(StreamPartitionCountMonitor.class);
    zkJobCoordinator.debounceTimer = mockDebounceTimer;
    zkJobCoordinator.streamPartitionCountMonitor = monitor;
    ZkSessionStateChangedListener zkSessionStateChangedListener = zkJobCoordinator.new ZkSessionStateChangedListener();
    zkSessionStateChangedListener.handleStateChanged(Watcher.Event.KeeperState.Expired);
    Mockito.verify(monitor).stop();
  }

  /** Becoming leader must start the stream partition count monitor. */
  @Test
  public void testShouldStartPartitionCountMonitorOnBecomingLeader() {
    when(zkUtils.getJobModel(TEST_JOB_MODEL_VERSION)).thenReturn(new JobModel(new MapConfig(), new HashMap<>()));
    ScheduleAfterDebounceTime mockDebounceTimer = Mockito.mock(ScheduleAfterDebounceTime.class);
    ZkJobCoordinator zkJobCoordinator = Mockito.spy(new ZkJobCoordinator(PROCESSOR_ID, new MapConfig(), new NoOpMetricsRegistry(), zkUtils, zkMetadataStore, coordinatorStreamStore));
    StreamPartitionCountMonitor monitor = Mockito.mock(StreamPartitionCountMonitor.class);
    zkJobCoordinator.debounceTimer = mockDebounceTimer;
    zkJobCoordinator.streamPartitionCountMonitor = monitor;
    // Stub the factory so the leader-election callback picks up our mock monitor.
    doReturn(monitor).when(zkJobCoordinator).getPartitionCountMonitor();
    ZkJobCoordinator.LeaderElectorListenerImpl listener = zkJobCoordinator.new LeaderElectorListenerImpl();
    listener.onBecomingLeader();
    Mockito.verify(monitor).start();
  }

  /** Stopping the coordinator must also stop the stream partition count monitor. */
  @Test
  public void testShouldStopPartitionCountMonitorWhenStoppingTheJobCoordinator() {
    when(zkUtils.getJobModel(TEST_JOB_MODEL_VERSION)).thenReturn(new JobModel(new MapConfig(), new HashMap<>()));
    ScheduleAfterDebounceTime mockDebounceTimer = Mockito.mock(ScheduleAfterDebounceTime.class);
    ZkJobCoordinator zkJobCoordinator = Mockito.spy(new ZkJobCoordinator(PROCESSOR_ID, new MapConfig(), new NoOpMetricsRegistry(), zkUtils, zkMetadataStore, coordinatorStreamStore));
    StreamPartitionCountMonitor monitor = Mockito.mock(StreamPartitionCountMonitor.class);
    zkJobCoordinator.debounceTimer = mockDebounceTimer;
    zkJobCoordinator.streamPartitionCountMonitor = monitor;
    zkJobCoordinator.stop();
    Mockito.verify(monitor).stop();
  }

  /**
   * With the default (empty) config, start() must not schedule the
   * start-work-with-last-active-job-model action on the debounce timer.
   */
  @Test
  public void testStartWithActiveJobModelDisabled() {
    final ScheduleAfterDebounceTime mockDebounceTimer = mock(ScheduleAfterDebounceTime.class);
    ZkJobCoordinator zkJobCoordinator = new ZkJobCoordinator(PROCESSOR_ID, new MapConfig(), new NoOpMetricsRegistry(), zkUtils, zkMetadataStore, coordinatorStreamStore);
    zkJobCoordinator.setLeaderElector(mock(ZkLeaderElector.class));
    zkJobCoordinator.setDebounceTimer(mockDebounceTimer);
    zkJobCoordinator.start();
    verifyZeroInteractions(mockDebounceTimer);
  }

  /**
   * With the feature enabled via `config`, start() should schedule exactly one
   * START_WORK_WITH_LAST_ACTIVE_JOB_MODEL action on the debounce timer.
   */
  @Test
  public void testStartWithActiveJobModelEnabled() {
    final ScheduleAfterDebounceTime mockDebounceTimer = mock(ScheduleAfterDebounceTime.class);
    ZkJobCoordinator zkJobCoordinator = new ZkJobCoordinator(PROCESSOR_ID, config, new NoOpMetricsRegistry(), zkUtils, zkMetadataStore, coordinatorStreamStore);
    zkJobCoordinator.setLeaderElector(mock(ZkLeaderElector.class));
    zkJobCoordinator.setDebounceTimer(mockDebounceTimer);
    zkJobCoordinator.start();
    verify(mockDebounceTimer, times(1)).scheduleAfterDebounceTime(
        eq(ZkJobCoordinator.START_WORK_WITH_LAST_ACTIVE_JOB_MODEL), anyLong(), any());
  }

  /**
   * When the last-active job model version matches the current job model version, the coordinator
   * should expire the old model, write task locality, and hand the restored model to the listener.
   */
  @Test
  public void testStartWorkWithLastActiveJobModel() {
    final TaskName taskName = new TaskName("task1");
    final ContainerModel mockContainerModel = mock(ContainerModel.class);
    final JobCoordinatorListener mockListener = mock(JobCoordinatorListener.class);
    final JobModel mockJobModel = mock(JobModel.class);
    ZkJobCoordinator zkJobCoordinator = Mockito.spy(new ZkJobCoordinator(PROCESSOR_ID, new MapConfig(), new NoOpMetricsRegistry(), zkUtils, zkMetadataStore, coordinatorStreamStore));
    when(mockContainerModel.getTasks()).thenReturn(ImmutableMap.of(taskName, mock(TaskModel.class)));
    when(mockJobModel.getContainers()).thenReturn(ImmutableMap.of(PROCESSOR_ID, mockContainerModel));
    // Last-active version equals the published version, so work can resume with the stored model.
    when(zkUtils.getLastActiveJobModelVersion()).thenReturn(TEST_JOB_MODEL_VERSION);
    when(zkUtils.getJobModelVersion()).thenReturn(TEST_JOB_MODEL_VERSION);
    doReturn(mockJobModel).when(zkJobCoordinator).readJobModelFromMetadataStore(TEST_JOB_MODEL_VERSION);
    zkJobCoordinator.setListener(mockListener);
    zkJobCoordinator.startWorkWithLastActiveJobModel();
    verify(mockListener, times(1)).onJobModelExpired();
    verify(zkUtils, times(1)).writeTaskLocality(eq(taskName), any());
    verify(mockListener, times(1)).onNewJobModel(PROCESSOR_ID, mockJobModel);
    assertEquals("Active job model should be updated with the new job model", mockJobModel, zkJobCoordinator.getActiveJobModel());
  }

  /**
   * When the last-active version is stale (differs from the latest published version), no container
   * should be started and the active job model stays null.
   */
  @Test
  public void testStartWorkWithLastActiveJobModelShouldNotStartContainer() {
    final JobCoordinatorListener mockListener = mock(JobCoordinatorListener.class);
    ZkJobCoordinator zkJobCoordinator = new ZkJobCoordinator(PROCESSOR_ID, new MapConfig(), new NoOpMetricsRegistry(), zkUtils, zkMetadataStore, coordinatorStreamStore);
    zkJobCoordinator.setListener(mockListener);
    when(zkUtils.getLastActiveJobModelVersion()).thenReturn(TEST_JOB_MODEL_VERSION);
    when(zkUtils.getJobModelVersion()).thenReturn(LATEST_JOB_MODEL_VERSION);
    zkJobCoordinator.startWorkWithLastActiveJobModel();
    verifyZeroInteractions(mockListener);
    assertNull("Expected active job model to be null", zkJobCoordinator.getActiveJobModel());
  }

  /** A missing (null) last-active version must also be a no-op. */
  @Test
  public void testStartWorkWithLastActiveJobModelWithNullActiveJobModelVersion() {
    final JobCoordinatorListener mockListener = mock(JobCoordinatorListener.class);
    ZkJobCoordinator zkJobCoordinator = new ZkJobCoordinator(PROCESSOR_ID, new MapConfig(), new NoOpMetricsRegistry(), zkUtils, zkMetadataStore, coordinatorStreamStore);
    zkJobCoordinator.setListener(mockListener);
    zkJobCoordinator.startWorkWithLastActiveJobModel();
    verifyZeroInteractions(mockListener);
    assertNull("Expected active job model to be null", zkJobCoordinator.getActiveJobModel());
  }

  /**
   * loadMetadataResources should create metadata resources and run the startpoint fan-out through
   * a full start/fanOut/stop lifecycle of the StartpointManager.
   */
  @Test
  public void testLoadMetadataResources() throws IOException {
    when(zkUtils.getJobModel(TEST_JOB_MODEL_VERSION)).thenReturn(jobModel);
    StartpointManager mockStartpointManager = Mockito.mock(StartpointManager.class);
    ZkJobCoordinator zkJobCoordinator = Mockito.spy(new ZkJobCoordinator(PROCESSOR_ID, config, new NoOpMetricsRegistry(), zkUtils, zkMetadataStore, coordinatorStreamStore));
    doReturn(mockStartpointManager).when(zkJobCoordinator).createStartpointManager();
    MetadataResourceUtil mockMetadataResourceUtil = mock(MetadataResourceUtil.class);
    doReturn(mockMetadataResourceUtil).when(zkJobCoordinator).createMetadataResourceUtil(any(), any(Config.class));
    // Sanity: nothing should have touched the startpoint manager before the call under test.
    verifyZeroInteractions(mockStartpointManager);
    zkJobCoordinator.loadMetadataResources(jobModel);
    verify(mockMetadataResourceUtil).createResources();
    verify(mockStartpointManager).start();
    verify(mockStartpointManager).fanOut(any());
    verify(mockStartpointManager).stop();
  }

  /**
   * A processor change that yields a new work assignment should publish a new job model version
   * and load the metadata resources for the generated model.
   */
  @Test
  public void testDoOnProcessorChange() {
    when(zkUtils.getJobModel(TEST_JOB_MODEL_VERSION)).thenReturn(jobModel);
    StartpointManager mockStartpointManager = Mockito.mock(StartpointManager.class);
    ZkJobCoordinator zkJobCoordinator = Mockito.spy(new ZkJobCoordinator(PROCESSOR_ID, config, new NoOpMetricsRegistry(), zkUtils, zkMetadataStore, coordinatorStreamStore));
    doReturn(mockStartpointManager).when(zkJobCoordinator).createStartpointManager();
    doReturn(jobModel).when(zkJobCoordinator).generateNewJobModel(any());
    doNothing().when(zkJobCoordinator).loadMetadataResources(jobModel);
    zkJobCoordinator.doOnProcessorChange();
    verify(zkUtils).publishJobModelVersion(anyString(), anyString());
    verify(zkJobCoordinator).loadMetadataResources(eq(jobModel));
  }

  /**
   * A processor change whose generated model equals the active one must not publish a version,
   * touch the barrier/debounce timer, or load metadata resources.
   */
  @Test
  public void testDoOnProcessorChangeWithNoChangesToWorkAssignment() {
    ZkBarrierForVersionUpgrade mockBarrier = mock(ZkBarrierForVersionUpgrade.class);
    ScheduleAfterDebounceTime mockDebounceTimer = mock(ScheduleAfterDebounceTime.class);
    ZkJobCoordinator zkJobCoordinator = Mockito.spy(new ZkJobCoordinator(PROCESSOR_ID, config, new NoOpMetricsRegistry(), zkUtils, zkMetadataStore, coordinatorStreamStore));
    zkJobCoordinator.setActiveJobModel(jobModel);
    zkJobCoordinator.setDebounceTimer(mockDebounceTimer);
    zkJobCoordinator.setZkBarrierUpgradeForVersion(mockBarrier);
    doReturn(jobModel).when(zkJobCoordinator).generateNewJobModel(any());
    zkJobCoordinator.doOnProcessorChange();
    verify(zkUtils, times(0)).publishJobModelVersion(anyString(), anyString());
    verifyZeroInteractions(mockBarrier);
    verifyZeroInteractions(mockDebounceTimer);
    verify(zkJobCoordinator, times(0)).loadMetadataResources(any());
  }

  /**
   * Shared fixture for the "no changes in work assignment" tests: builds a coordinator whose
   * active model equals the model handed to {@code testMethod}, runs it, then delegates the
   * assertions to {@code verificationMethod}.
   */
  private void testNoChangesInWorkAssignmentHelper(BiConsumer<ZkJobCoordinator, JobModel> testMethod,
      BiConsumer<ZkUtils, JobCoordinatorListener> verificationMethod) {
    final JobCoordinatorListener mockListener = mock(JobCoordinatorListener.class);
    final JobModel mockJobModel = mock(JobModel.class);
    ZkJobCoordinator zkJobCoordinator = new ZkJobCoordinator(PROCESSOR_ID, new MapConfig(), new NoOpMetricsRegistry(), zkUtils, zkMetadataStore, coordinatorStreamStore);
    zkJobCoordinator.setListener(mockListener);
    zkJobCoordinator.setActiveJobModel(mockJobModel);
    testMethod.accept(zkJobCoordinator, mockJobModel);
    verificationMethod.accept(zkUtils, mockListener);
  }

  /**
   * Shared fixture for job-model-version-change tests. The mocked debounce timer runs any
   * scheduled action inline and counts down a latch so the test can wait for the asynchronous
   * handler to finish before verifying.
   */
  private void testJobModelVersionChangeHelper(JobModel activeJobModel, JobModel newJobModel,
      BiConsumer<ZkBarrierForVersionUpgrade, JobCoordinatorListener> verificationMethod) throws InterruptedException {
    final CountDownLatch completionLatch = new CountDownLatch(1);
    final JobCoordinatorListener mockListener = mock(JobCoordinatorListener.class);
    final ScheduleAfterDebounceTime mockDebounceTimer = mock(ScheduleAfterDebounceTime.class);
    final ZkBarrierForVersionUpgrade mockBarrier = mock(ZkBarrierForVersionUpgrade.class);
    // Execute scheduled runnables synchronously; arg[2] is the Runnable passed to the timer.
    doAnswer(ctx -> {
      Object[] args = ctx.getArguments();
      ((Runnable) args[2]).run();
      completionLatch.countDown();
      return null;
    }).when(mockDebounceTimer).scheduleAfterDebounceTime(anyString(), anyLong(), anyObject());
    ZkJobCoordinator zkJobCoordinator = Mockito.spy(new ZkJobCoordinator(PROCESSOR_ID, new MapConfig(), new NoOpMetricsRegistry(), zkUtils, zkMetadataStore, coordinatorStreamStore));
    zkJobCoordinator.setListener(mockListener);
    zkJobCoordinator.setActiveJobModel(activeJobModel);
    zkJobCoordinator.setZkBarrierUpgradeForVersion(mockBarrier);
    zkJobCoordinator.debounceTimer = mockDebounceTimer;
    doReturn(newJobModel).when(zkJobCoordinator).readJobModelFromMetadataStore(TEST_JOB_MODEL_VERSION);
    final ZkJobCoordinator.ZkJobModelVersionChangeHandler zkJobModelVersionChangeHandler = zkJobCoordinator.new ZkJobModelVersionChangeHandler(zkUtils);
    zkJobModelVersionChangeHandler.doHandleDataChange("path", TEST_JOB_MODEL_VERSION);
    completionLatch.await(1, TimeUnit.SECONDS);
    verificationMethod.accept(mockBarrier, mockListener);
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.file.remote;

import java.util.Map;

import org.apache.camel.Exchange;
import org.apache.camel.PollingConsumer;
import org.apache.camel.Processor;
import org.apache.camel.component.file.GenericFile;
import org.apache.camel.component.file.GenericFileEndpoint;
import org.apache.camel.component.file.GenericFileExist;
import org.apache.camel.component.file.GenericFilePollingConsumer;
import org.apache.camel.component.file.GenericFileProducer;
import org.apache.camel.spi.UriParam;
import org.apache.camel.support.processor.idempotent.MemoryIdempotentRepository;
import org.apache.camel.util.StringHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Remote file endpoint.
 */
public abstract class RemoteFileEndpoint<T> extends GenericFileEndpoint<T> {

    private static final Logger LOG = LoggerFactory.getLogger(RemoteFileEndpoint.class);

    @UriParam(label = "advanced", description = "Specifies the maximum reconnect attempts Camel performs when it "
            + "tries to connect to the remote FTP server. Use 0 to disable this behavior.")
    private int maximumReconnectAttempts = 3;
    @UriParam(label = "advanced", defaultValue = "1000",
              description = "Delay in millis Camel will wait before performing a reconnect attempt.",
              javaType = "java.time.Duration")
    private long reconnectDelay = 1000;
    @UriParam(label = "common", description = "Whether or not to disconnect from remote FTP server right after use. "
            + "Disconnect will only disconnect the current connection to the FTP server. If you have a consumer which "
            + "you want to stop, then you need to stop the consumer/route instead.")
    private boolean disconnect;
    @UriParam(label = "producer,advanced", description = "Whether or not to disconnect from remote FTP server right "
            + "after a Batch upload is complete. disconnectOnBatchComplete will only disconnect the current connection "
            + "to the FTP server.")
    private boolean disconnectOnBatchComplete;
    @UriParam(label = "common,advanced", description = "If set this option to be true, camel-ftp will use the list "
            + "file directly to check if the file exists. Since some FTP server may not support to list the file "
            + "directly, if the option is false, camel-ftp will use the old way to list the directory and check if the "
            + "file exists. This option also influences readLock=changed to control whether it performs a fast check "
            + "to update file information or not. This can be used to speed up the process if the FTP server has a lot "
            + "of files.")
    private boolean fastExistsCheck;
    @UriParam(label = "consumer,advanced", description = "Whether the FTP consumer should download the file. If this "
            + "option is set to false, then the message body will be null, but the consumer will still trigger a Camel "
            + "Exchange that has details about the file such as file name, file size, etc. It's just that the file will "
            + "not be downloaded.")
    private boolean download = true;

    public RemoteFileEndpoint() {
        // no args constructor for spring bean endpoint configuration
        // ftp must be synchronous as the ftp-client is not thread-safe
        setSynchronous(true);
        // for ftp we need to use a higher interval/check-out than for files
        setReadLockTimeout(20000);
        setReadLockCheckInterval(5000);
        // explicitly set RemoteFilePollingConsumerPollStrategy otherwise
        // DefaultPollingConsumerPollStrategy would be used
        setPollStrategy(new RemoteFilePollingConsumerPollStrategy());
    }

    public RemoteFileEndpoint(String uri, RemoteFileComponent<T> component, RemoteFileConfiguration configuration) {
        super(uri, component);
        this.configuration = configuration;
        // ftp must be synchronous as the ftp-client is not thread-safe
        setSynchronous(true);
        // for ftp we need to use a higher interval/check-out than for files
        setReadLockTimeout(20000);
        setReadLockCheckInterval(5000);
        // explicitly set RemoteFilePollingConsumerPollStrategy otherwise
        // DefaultPollingConsumerPollStrategy would be used
        setPollStrategy(new RemoteFilePollingConsumerPollStrategy());
    }

    @Override
    public boolean isSingletonProducer() {
        // this producer is stateful because the remote file operations is not
        // thread safe
        return false;
    }

    @Override
    public RemoteFileConfiguration getConfiguration() {
        return (RemoteFileConfiguration) this.configuration;
    }

    @Override
    public Exchange createExchange(GenericFile<T> file) {
        Exchange answer = super.createExchange();
        if (file != null) {
            file.bindToExchange(answer);
        }
        return answer;
    }

    @Override
    public GenericFileProducer<T> createProducer() throws Exception {
        afterPropertiesSet();

        // you cannot use temp file and file exists append
        if (getFileExist() == GenericFileExist.Append && (getTempPrefix() != null || getTempFileName() != null)) {
            throw new IllegalArgumentException("You cannot set both fileExist=Append and tempPrefix/tempFileName options");
        }

        // ensure fileExist and moveExisting is configured correctly if in use
        if (getFileExist() == GenericFileExist.Move && getMoveExisting() == null) {
            throw new IllegalArgumentException("You must configure moveExisting option when fileExist=Move");
        } else if (getMoveExisting() != null && getFileExist() != GenericFileExist.Move) {
            throw new IllegalArgumentException("You must configure fileExist=Move when moveExisting has been set");
        }

        return buildProducer();
    }

    @Override
    public RemoteFileConsumer<T> createConsumer(Processor processor) throws Exception {
        afterPropertiesSet();

        RemoteFileConsumer<T> consumer = buildConsumer(processor);

        // delete and move are mutually exclusive
        if (isDelete() && getMove() != null) {
            throw new IllegalArgumentException("You cannot both set delete=true and move options");
        }
        // if noop=true then idempotent should also be configured
        if (isNoop() && !isIdempotentSet()) {
            LOG.info("Endpoint is configured with noop=true so forcing endpoint to be idempotent as well");
            setIdempotent(true);
        }
        // if idempotent and no repository set then create a default one
        if (isIdempotentSet() && isIdempotent() && idempotentRepository == null) {
            LOG.info("Using default memory based idempotent repository with cache max size: {}", DEFAULT_IDEMPOTENT_CACHE_SIZE);
            idempotentRepository = MemoryIdempotentRepository.memoryIdempotentRepository(DEFAULT_IDEMPOTENT_CACHE_SIZE);
        }

        // when not listing the directory, a concrete file name is mandatory
        if (!getConfiguration().isUseList() && getFileName() == null) {
            throw new IllegalArgumentException(
                    "Endpoint is configured with useList=false, then fileName must be configured also");
        }

        // set max messages per poll
        consumer.setMaxMessagesPerPoll(getMaxMessagesPerPoll());
        consumer.setEagerLimitMaxMessagesPerPoll(isEagerMaxMessagesPerPoll());

        configureConsumer(consumer);
        return consumer;
    }

    @Override
    public PollingConsumer createPollingConsumer() throws Exception {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Creating GenericFilePollingConsumer with queueSize: {} blockWhenFull: {} blockTimeout: {}",
                    getPollingConsumerQueueSize(), isPollingConsumerBlockWhenFull(), getPollingConsumerBlockTimeout());
        }
        GenericFilePollingConsumer result = new GenericFilePollingConsumer(this);
        // should not call configurePollingConsumer when its
        // GenericFilePollingConsumer
        result.setBlockWhenFull(isPollingConsumerBlockWhenFull());
        result.setBlockTimeout(getPollingConsumerBlockTimeout());
        return result;
    }

    /**
     * Validates this endpoint if its configured properly.
     *
     * @throws IllegalArgumentException is thrown if endpoint is invalid configured for its mandatory options
     */
    protected void afterPropertiesSet() {
        RemoteFileConfiguration config = getConfiguration();
        StringHelper.notEmpty(config.getHost(), "host");
        StringHelper.notEmpty(config.getProtocol(), "protocol");
        if (!isSynchronous()) {
            throw new IllegalArgumentException("Using synchronous=false is not supported for camel-ftp");
        }
    }

    @Override
    protected Map<String, Object> getParamsAsMap() {
        Map<String, Object> map = super.getParamsAsMap();
        map.put("fastExistsCheck", fastExistsCheck);
        return map;
    }

    /**
     * Remote File Endpoints, impl this method to create a custom consumer specific to their "protocol" etc.
     *
     * @param  processor the processor
     * @return           the created consumer
     */
    protected abstract RemoteFileConsumer<T> buildConsumer(Processor processor);

    /**
     * Remote File Endpoints, impl this method to create a custom producer specific to their "protocol" etc.
     *
     * @return the created producer
     */
    protected abstract GenericFileProducer<T> buildProducer();

    /**
     * Creates the operations to be used by the consumer or producer.
     *
     * @return           a new created operations
     * @throws Exception is thrown if error creating operations.
     */
    public abstract RemoteFileOperations<T> createRemoteFileOperations() throws Exception;

    /**
     * Returns human readable server information for logging purpose
     */
    public String remoteServerInformation() {
        return ((RemoteFileConfiguration) configuration).remoteServerInformation();
    }

    @Override
    public char getFileSeparator() {
        // remote files always use unix style path separators
        return '/';
    }

    @Override
    public boolean isAbsolute(String name) {
        return name.startsWith("/");
    }

    public int getMaximumReconnectAttempts() {
        return maximumReconnectAttempts;
    }

    /**
     * Specifies the maximum reconnect attempts Camel performs when it tries to connect to the remote FTP server. Use 0
     * to disable this behavior.
     */
    public void setMaximumReconnectAttempts(int maximumReconnectAttempts) {
        this.maximumReconnectAttempts = maximumReconnectAttempts;
    }

    public long getReconnectDelay() {
        return reconnectDelay;
    }

    /**
     * Delay in millis Camel will wait before performing a reconnect attempt.
     */
    public void setReconnectDelay(long reconnectDelay) {
        this.reconnectDelay = reconnectDelay;
    }

    public boolean isDisconnect() {
        return disconnect;
    }

    /**
     * Whether or not to disconnect from remote FTP server right after use. Disconnect will only disconnect the current
     * connection to the FTP server. If you have a consumer which you want to stop, then you need to stop the
     * consumer/route instead.
     */
    public void setDisconnect(boolean disconnect) {
        this.disconnect = disconnect;
    }

    public boolean isDisconnectOnBatchComplete() {
        return disconnectOnBatchComplete;
    }

    /**
     * Whether or not to disconnect from remote FTP server right after a Batch upload is complete.
     * disconnectOnBatchComplete will only disconnect the current connection to the FTP server.
     */
    public void setDisconnectOnBatchComplete(boolean disconnectOnBatchComplete) {
        this.disconnectOnBatchComplete = disconnectOnBatchComplete;
    }

    public boolean isFastExistsCheck() {
        return fastExistsCheck;
    }

    /**
     * If set this option to be true, camel-ftp will use the list file directly to check if the file exists. Since some
     * FTP server may not support to list the file directly, if the option is false, camel-ftp will use the old way to
     * list the directory and check if the file exists. This option also influences readLock=changed to control whether
     * it performs a fast check to update file information or not. This can be used to speed up the process if the FTP
     * server has a lot of files.
     */
    public void setFastExistsCheck(boolean fastExistsCheck) {
        this.fastExistsCheck = fastExistsCheck;
    }

    public boolean isDownload() {
        return this.download;
    }

    /**
     * Whether the FTP consumer should download the file. If this option is set to false, then the message body will be
     * null, but the consumer will still trigger a Camel Exchange that has details about the file such as file name,
     * file size, etc. It's just that the file will not be downloaded.
     */
    public void setDownload(boolean download) {
        this.download = download;
    }
}
/*
 * Copyright 2013-2017 Jonathan Vasquez <jon@xyinn.org>
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation and/or
 * other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package com.vasquez;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.reflect.TypeToken;
import org.apache.commons.io.FileUtils;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import javax.swing.JOptionPane;
import javax.swing.table.AbstractTableModel;

public class EntryWithModel {
    // Path of the JSON file the entry list is persisted to/loaded from.
    private String entriesFile;
    // In-memory list of entries, shared with the table model below.
    private ArrayList<Entry> list;
    // Column headers used by the table model.
    private String[] columnNames = {"Version", "Exp", "Last", "Label", "Flags"};
    // Table model wrapper over `list`; most mutating calls delegate to it.
    private EntryModel peModel;

    public EntryWithModel() {
        entriesFile = "Entries.json";
        list = loadData();
        peModel = new EntryModel();
    }

    /** Returns the entry flagged as last ran, or null if none is flagged. */
    public Entry getLastRanEntry() {
        for (Entry e : list) {
            if (e.WasLastRan) {
                return e;
            }
        }
        return null;
    }

    // No duplicates are allowed, the only exception is if the person wants to
    // add an entry with the same name, version, and path, but wants different flags.
    // Returns true when an entry with the same label+mode already exists and the
    // version/path differ (i.e. the addition should be rejected).
    public boolean doesEntryExistForThisMode(String label, boolean isExpansion, String version, String path) {
        for (Entry entry : list) {
            if (entry.Label.equalsIgnoreCase(label) && entry.IsExpansion == isExpansion) {
                // if the label and the mode are the same,
                // lets check to see if we should allow this duplicate or not.
                // only if the version and the path is the same we should allow it.
                if (entry.Version.equalsIgnoreCase(version) && entry.Path.equalsIgnoreCase(path)) {
                    return false;
                }
                return true;
            }
        }
        return false;
    }

    // Retrieves an entry that has the label, version, game type as the one we are looking for.
    // This is used in order to auto-select another equivalent entry when we delete one that has
    // essentially the same information (But different flags).
    public Entry getEquivalentEntry(Entry currentEntry) {
        for (Entry entry : list) {
            if (entry.Label.equalsIgnoreCase(currentEntry.Label) && entry.IsExpansion == currentEntry.IsExpansion) {
                // Duplicate entry with different flags if anything
                if (entry.Version.equalsIgnoreCase(currentEntry.Version) && entry.Path.equalsIgnoreCase(currentEntry.Path)) {
                    return entry;
                }
                return null;
            }
        }
        return null;
    }

    /**
     * Returns the index of the entry matching on all five identity fields
     * (label, mode, path, version, flags), or -1 when there is no match.
     */
    public int getEntryIndex(Entry entry) {
        for (int i = 0; i < list.size(); i++) {
            Entry currentEntry = list.get(i);
            if (entry.Label.equalsIgnoreCase(currentEntry.Label)
                    && entry.IsExpansion == currentEntry.IsExpansion
                    && entry.Path.equalsIgnoreCase(currentEntry.Path)
                    && entry.Version.equalsIgnoreCase(currentEntry.Version)
                    && entry.Flags.equalsIgnoreCase(currentEntry.Flags)) {
                return i;
            }
        }
        return -1;
    }

    /** Clears the last-ran flag on every entry so at most one can be re-flagged afterwards. */
    public void ClearAllLastRan() {
        for (Entry entry : list) {
            entry.WasLastRan = false;
        }
    }

    /** Delegates to the table model; returns the index of the added entry. */
    public int addEntry(String version, String path, String flags, String label, boolean expansion) {
        return peModel.addEntry(version, path, flags, label, expansion);
    }

    public int modifyEntry(String version, String path, String flags, String label, boolean expansion, boolean wasLastRan, int e) {
        // Check to make sure that there is only one last ran selected.
        // An easy way to do this is just to set everything to false
        // and only enable the one we want.
if (wasLastRan) { ClearAllLastRan(); } peModel.modifyEntry(version, path, flags, label, expansion, wasLastRan, e); return e; } public int delEntry(int e) { return peModel.delEntry(e); } public Entry getEntry(int i) { return list.get(i); } public int copyEntry(int e) { return peModel.copyEntry(e); } public Object getValueAt(int row, int col) { return peModel.getValueAt(row, col); } public String getSelectedVersion(int row) { return peModel.getSelectedVersion(row); } public String getSelectedPath(int row) { return peModel.getSelectedPath(row); } public String getSelectedFlags(int row) { return peModel.getSelectedFlags(row); } public String getSelectedLabel(int row) { return peModel.getSelectedLabel(row); } public boolean isSelectedExpansion(int row) { return peModel.isSelectedExpansion(row); } public boolean wasLastRan(int row) { return peModel.wasLastRan(row); } public int getSize() { return list.size(); } public void saveData() { try (BufferedWriter bw = new BufferedWriter(new FileWriter(entriesFile))) { bw.write(new GsonBuilder().setPrettyPrinting().create().toJson(list)); peModel.triggerUIUpdate(); } catch (IOException e) { e.printStackTrace(); } } public ArrayList<Entry> loadData() { ArrayList<Entry> stagedEntries = new ArrayList<>(); try { File entryFile = new File(entriesFile); if(entryFile.exists()) { String fileContents = FileUtils.readFileToString(entryFile, "UTF-8"); ArrayList<Entry> potentialEntryList = new Gson().fromJson(fileContents, new TypeToken<ArrayList<Entry>>(){}.getType()); // Looks like the GSON library doesn't really support having a default value for a parameter (without writing your // own custom serializer. I'm just going to scan all of the entries and check if the Label is null. If it is, we // set it to the version (to maintain old behavior). 
if(potentialEntryList != null) { for (Entry e: potentialEntryList) { if (e.Label == null) { e.Label = e.Version; } } return potentialEntryList; } return stagedEntries; } JOptionPane.showMessageDialog(null, "Welcome to Bliss Version Switcher. \n\nPlease add an entry for your Game.exe and make sure that it is the\n" + "same version as what's in that folder, and then click Launch.\n" + "Picking a different version than what's in the folder will cause problems.\n\n" + "For example, if my Diablo II folder is currently located at D:\\Games\\Diablo II\\Game.exe\n" + "the current version of Diablo II in that directory is 1.14d, and I wanna label this folder\n" + "\"Nightwalker\" in the switcher, I'm playing Expansion, with window mode\n" + "and no sound, then my initial entry for the switcher would look as follows:\n\n" + "Version: 1.14d\n" + "Label: Nightwalker\n" + "Path (Game.exe): D:\\Games\\Diablo II\\Game.exe\n" + "Flags: -w -ns\n" + "Expansion: Yes"); entryFile.createNewFile(); } catch (IOException e) { e.printStackTrace(); } return stagedEntries; } public EntryModel getModel() { return peModel; } public ArrayList<Entry> getList() { return list; } private class EntryModel extends AbstractTableModel { public void triggerUIUpdate() { fireTableDataChanged(); } public int getColumnCount() { return columnNames.length; } public int getRowCount() { return list.size(); } public String getColumnName(int col) { return columnNames[col]; } public Object getValueAt(int row, int col) { Entry e = list.get(row); switch(col) { case 0: return e.Version; case 1: return e.IsExpansion; case 2: return e.WasLastRan; case 3: return e.Label; case 4: return e.Flags; default: return "error"; } } // Get the class type of the column so that we can get the check boxes to render correctly public Class getColumnClass(int c) { return getValueAt(0, c).getClass(); } public String getSelectedVersion(int row) { return list.get(row).Version; } public String getSelectedPath(int row) { return 
list.get(row).Path; } public String getSelectedFlags(int row) { return list.get(row).Flags; } public String getSelectedLabel(int row) { return list.get(row).Label; } public boolean isSelectedExpansion(int row) { return list.get(row).IsExpansion; } public boolean wasLastRan(int row) { return list.get(row).WasLastRan; } public int addEntry(String version, String path, String flags, String label, boolean expansion) { list.add(new Entry(version, path, flags, label, expansion, false)); saveData(); return list.size()-1; } public int modifyEntry(String version, String path, String flags, String label, boolean expansion, boolean wasLastRan, int e) { Entry t = list.get(e); t.Version = version; t.IsExpansion = expansion; t.Path = path; t.Flags = flags; t.Label = label; t.WasLastRan = wasLastRan; saveData(); return e; } // Deletes an entry from the list // Returns: The index of an identical version & type entry if it exists // Example: Two entries are identical but with different flags. public int delEntry(int entry) { final int ERROR_OR_NULL = -1; if(entry != ERROR_OR_NULL) { Entry removedEntry = list.remove(entry); Entry newEntry = getEquivalentEntry(removedEntry); if (removedEntry.WasLastRan && newEntry != null) { newEntry.WasLastRan = true; } saveData(); Entry lastRanEntry = getLastRanEntry(); int newIndex = ERROR_OR_NULL; if (newEntry != null) { return getEntryIndex(newEntry); } if (lastRanEntry != null) { return getEntryIndex(lastRanEntry); } return newIndex; } // Returns null if some error happened return ERROR_OR_NULL; } // Copies the entry that is passed to this method and then inserts it into the list public int copyEntry(int entry) { int next = entry + 1; if(entry != -1) { Entry oldEntry = list.get(entry); // When we copy an entry, don't copy the Label because that can cause problems if // the person changes the version but keeps the same copied label. This method // forces the person to name the entry which will case a re-validation of label names. 
Entry newEntry = new Entry(oldEntry.Version, oldEntry.Path, oldEntry.Flags, "", oldEntry.IsExpansion, false); list.add(next, newEntry); saveData(); return next; } else { return -1; } } } }
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/aiplatform/v1/io.proto
//
// NOTE(review): machine-generated protobuf message with a single string field
// (output_uri, field number 1). Do not hand-edit; regenerate from the .proto.
// The parsing-constructor style here presumably corresponds to an older
// protobuf-java code generator — confirm the pinned protobuf version before
// regenerating.

package com.google.cloud.aiplatform.v1;

/**
 * <pre>
 * The Container Registry location for the container image.
 * </pre>
 *
 * Protobuf type {@code google.cloud.aiplatform.v1.ContainerRegistryDestination}
 */
public final class ContainerRegistryDestination extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ContainerRegistryDestination)
    ContainerRegistryDestinationOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ContainerRegistryDestination.newBuilder() to construct.
  private ContainerRegistryDestination(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private ContainerRegistryDestination() {
    outputUri_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ContainerRegistryDestination();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor: reads tags until EOF (tag 0); tag 10 is
  // field 1 (output_uri) as a UTF-8-validated string; anything else is kept in
  // unknownFields.
  private ContainerRegistryDestination(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              java.lang.String s = input.readStringRequireUtf8();
              outputUri_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1.IoProto
        .internal_static_google_cloud_aiplatform_v1_ContainerRegistryDestination_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1.IoProto
        .internal_static_google_cloud_aiplatform_v1_ContainerRegistryDestination_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1.ContainerRegistryDestination.class,
            com.google.cloud.aiplatform.v1.ContainerRegistryDestination.Builder.class);
  }

  public static final int OUTPUT_URI_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; lazily converted (and cached) to
  // String on first getOutputUri() call.
  private volatile java.lang.Object outputUri_;
  /**
   *
   *
   * <pre>
   * Required. Container Registry URI of a container image.
   * Only Google Container Registry and Artifact Registry are supported now.
   * Accepted forms:
   * *  Google Container Registry path. For example:
   *    `gcr.io/projectId/imageName:tag`.
   * *  Artifact Registry path. For example:
   *    `us-central1-docker.pkg.dev/projectId/repoName/imageName:tag`.
   * If a tag is not specified, "latest" will be used as the default tag.
   * </pre>
   *
   * <code>string output_uri = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The outputUri.
   */
  @java.lang.Override
  public java.lang.String getOutputUri() {
    java.lang.Object ref = outputUri_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      outputUri_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. Container Registry URI of a container image.
   * Only Google Container Registry and Artifact Registry are supported now.
   * Accepted forms:
   * *  Google Container Registry path. For example:
   *    `gcr.io/projectId/imageName:tag`.
   * *  Artifact Registry path. For example:
   *    `us-central1-docker.pkg.dev/projectId/repoName/imageName:tag`.
   * If a tag is not specified, "latest" will be used as the default tag.
   * </pre>
   *
   * <code>string output_uri = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for outputUri.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getOutputUriBytes() {
    java.lang.Object ref = outputUri_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      outputUri_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputUri_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, outputUri_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputUri_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, outputUri_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1.ContainerRegistryDestination)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1.ContainerRegistryDestination other =
        (com.google.cloud.aiplatform.v1.ContainerRegistryDestination) obj;

    if (!getOutputUri().equals(other.getOutputUri())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + OUTPUT_URI_FIELD_NUMBER;
    hash = (53 * hash) + getOutputUri().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.aiplatform.v1.ContainerRegistryDestination parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.ContainerRegistryDestination parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.ContainerRegistryDestination parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.ContainerRegistryDestination parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.ContainerRegistryDestination parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.ContainerRegistryDestination parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.ContainerRegistryDestination parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.ContainerRegistryDestination parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.ContainerRegistryDestination parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.ContainerRegistryDestination parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.ContainerRegistryDestination parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.ContainerRegistryDestination parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1.ContainerRegistryDestination prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * The Container Registry location for the container image.
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1.ContainerRegistryDestination}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ContainerRegistryDestination)
      com.google.cloud.aiplatform.v1.ContainerRegistryDestinationOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.IoProto
          .internal_static_google_cloud_aiplatform_v1_ContainerRegistryDestination_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1.IoProto
          .internal_static_google_cloud_aiplatform_v1_ContainerRegistryDestination_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1.ContainerRegistryDestination.class,
              com.google.cloud.aiplatform.v1.ContainerRegistryDestination.Builder.class);
    }

    // Construct using com.google.cloud.aiplatform.v1.ContainerRegistryDestination.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      outputUri_ = "";

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1.IoProto
          .internal_static_google_cloud_aiplatform_v1_ContainerRegistryDestination_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.ContainerRegistryDestination getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1.ContainerRegistryDestination.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.ContainerRegistryDestination build() {
      com.google.cloud.aiplatform.v1.ContainerRegistryDestination result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.ContainerRegistryDestination buildPartial() {
      com.google.cloud.aiplatform.v1.ContainerRegistryDestination result =
          new com.google.cloud.aiplatform.v1.ContainerRegistryDestination(this);
      result.outputUri_ = outputUri_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1.ContainerRegistryDestination) {
        return mergeFrom((com.google.cloud.aiplatform.v1.ContainerRegistryDestination) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.aiplatform.v1.ContainerRegistryDestination other) {
      if (other == com.google.cloud.aiplatform.v1.ContainerRegistryDestination.getDefaultInstance())
        return this;
      if (!other.getOutputUri().isEmpty()) {
        outputUri_ = other.outputUri_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.aiplatform.v1.ContainerRegistryDestination parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.aiplatform.v1.ContainerRegistryDestination) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object outputUri_ = "";
    /**
     *
     *
     * <pre>
     * Required. Container Registry URI of a container image.
     * Only Google Container Registry and Artifact Registry are supported now.
     * Accepted forms:
     * *  Google Container Registry path. For example:
     *    `gcr.io/projectId/imageName:tag`.
     * *  Artifact Registry path. For example:
     *    `us-central1-docker.pkg.dev/projectId/repoName/imageName:tag`.
     * If a tag is not specified, "latest" will be used as the default tag.
     * </pre>
     *
     * <code>string output_uri = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The outputUri.
     */
    public java.lang.String getOutputUri() {
      java.lang.Object ref = outputUri_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        outputUri_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Container Registry URI of a container image.
     * Only Google Container Registry and Artifact Registry are supported now.
     * Accepted forms:
     * *  Google Container Registry path. For example:
     *    `gcr.io/projectId/imageName:tag`.
     * *  Artifact Registry path. For example:
     *    `us-central1-docker.pkg.dev/projectId/repoName/imageName:tag`.
     * If a tag is not specified, "latest" will be used as the default tag.
     * </pre>
     *
     * <code>string output_uri = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for outputUri.
     */
    public com.google.protobuf.ByteString getOutputUriBytes() {
      java.lang.Object ref = outputUri_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        outputUri_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Container Registry URI of a container image.
     * Only Google Container Registry and Artifact Registry are supported now.
     * Accepted forms:
     * *  Google Container Registry path. For example:
     *    `gcr.io/projectId/imageName:tag`.
     * *  Artifact Registry path. For example:
     *    `us-central1-docker.pkg.dev/projectId/repoName/imageName:tag`.
     * If a tag is not specified, "latest" will be used as the default tag.
     * </pre>
     *
     * <code>string output_uri = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The outputUri to set.
     * @return This builder for chaining.
     */
    public Builder setOutputUri(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      outputUri_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Container Registry URI of a container image.
     * Only Google Container Registry and Artifact Registry are supported now.
     * Accepted forms:
     * *  Google Container Registry path. For example:
     *    `gcr.io/projectId/imageName:tag`.
     * *  Artifact Registry path. For example:
     *    `us-central1-docker.pkg.dev/projectId/repoName/imageName:tag`.
     * If a tag is not specified, "latest" will be used as the default tag.
     * </pre>
     *
     * <code>string output_uri = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearOutputUri() {
      outputUri_ = getDefaultInstance().getOutputUri();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Container Registry URI of a container image.
     * Only Google Container Registry and Artifact Registry are supported now.
     * Accepted forms:
     * *  Google Container Registry path. For example:
     *    `gcr.io/projectId/imageName:tag`.
     * *  Artifact Registry path. For example:
     *    `us-central1-docker.pkg.dev/projectId/repoName/imageName:tag`.
     * If a tag is not specified, "latest" will be used as the default tag.
     * </pre>
     *
     * <code>string output_uri = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for outputUri to set.
     * @return This builder for chaining.
     */
    public Builder setOutputUriBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      outputUri_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ContainerRegistryDestination)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ContainerRegistryDestination)
  private static final com.google.cloud.aiplatform.v1.ContainerRegistryDestination
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ContainerRegistryDestination();
  }

  public static com.google.cloud.aiplatform.v1.ContainerRegistryDestination getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<ContainerRegistryDestination> PARSER =
      new com.google.protobuf.AbstractParser<ContainerRegistryDestination>() {
        @java.lang.Override
        public ContainerRegistryDestination parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new ContainerRegistryDestination(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<ContainerRegistryDestination> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ContainerRegistryDestination> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1.ContainerRegistryDestination getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/**
 * The MIT License (MIT)
 *
 * Copyright (c) 2013 Olexandr Tyshkovets <olexandr.tyshkovets@gmail.com>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package com.github.aint.jfeeder.model.entity;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import javax.persistence.AttributeOverride;
import javax.persistence.CollectionTable;
import javax.persistence.Column;
import javax.persistence.ElementCollection;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.Table;

/**
 * Stores the information about a feed.
 *
 * @author Olexandr Tyshkovets
 */
@javax.persistence.Entity
@Table(name = "FEED")
@AttributeOverride(name = "id", column = @Column(name = "FEED_ID", updatable = false, nullable = false))
public class Feed extends Entity {
    private static final long serialVersionUID = 5350524030230570660L;
    private String author;
    private String description;
    private String feedType;
    private String feedImageUrl;
    private String language;
    private String link;
    private Date publishedDate;
    private String title;
    private List<String> supportedFeedTypes = new ArrayList<>();
    private List<FeedEntry> entries = new ArrayList<>();
    private User user;

    /**
     * The default constructor for hibernate.
     */
    protected Feed() {
    }

    /**
     * Constructs a new feed with specified arguments.
     *
     * @param author
     *            the feed's author
     * @param description
     *            the feed's description
     * @param feedImageUrl
     *            the feed's image URL
     * @param feedType
     *            the feed's type
     * @param language
     *            the feed's language
     * @param link
     *            the feed's link
     * @param publishedDate
     *            the feed's published date
     * @param title
     *            the feed's title
     * @param user
     *            the feed's user
     */
    public Feed(String author, String description, String feedImageUrl, String feedType, String language,
            String link, Date publishedDate, String title, User user) {
        this.author = author;
        this.description = description;
        this.feedImageUrl = feedImageUrl;
        this.feedType = feedType;
        this.language = language;
        this.link = link;
        this.publishedDate = publishedDate;
        this.title = title;
        this.user = user;
    }

    /**
     * @return the author
     */
    @Column(name = "AUTHOR")
    public String getAuthor() {
        return author;
    }

    /**
     * @param author
     *            the author to set
     */
    public void setAuthor(String author) {
        this.author = author;
    }

    /**
     * @return the description
     */
    @Column(name = "DESCRIPTION")
    public String getDescription() {
        return description;
    }

    /**
     * @param description
     *            the description to set
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * @return the feed type
     */
    @Column(name = "TYPE")
    public String getFeedType() {
        return feedType;
    }

    /**
     * @param feedType
     *            the feed type to set
     */
    public void setFeedType(String feedType) {
        this.feedType = feedType;
    }

    /**
     * @return the feed image URL
     */
    @Column(name = "IMAGE_URL")
    public String getFeedImageUrl() {
        return feedImageUrl;
    }

    /**
     * @param feedImageUrl
     *            the feed image URL to set
     */
    public void setFeedImageUrl(String feedImageUrl) {
        this.feedImageUrl = feedImageUrl;
    }

    /**
     * @return the language
     */
    @Column(name = "LANGUAGE")
    public String getLanguage() {
        return language;
    }

    /**
     * @param language
     *            the language to set
     */
    public void setLanguage(String language) {
        this.language = language;
    }

    /**
     * @return the link
     */
    @Column(name = "LINK")
    public String getLink() {
        return link;
    }

    /**
     * @param link
     *            the link to set
     */
    public void setLink(String link) {
        this.link = link;
    }

    /**
     * @return the published date
     */
    @Column(name = "PUBLISHED_DATE")
    public Date getPublishedDate() {
        return publishedDate;
    }

    /**
     * @param publishedDate
     *            the published date to set
     */
    public void setPublishedDate(Date publishedDate) {
        this.publishedDate = publishedDate;
    }

    /**
     * @return the title
     */
    @Column(name = "TITLE")
    public String getTitle() {
        return title;
    }

    /**
     * @param title
     *            the title to set
     */
    public void setTitle(String title) {
        this.title = title;
    }

    /**
     * @return the supported feed types
     */
    @ElementCollection
    @CollectionTable(name = "FEED_TYPE", joinColumns = @JoinColumn(name = "FEED_ID"))
    @Column(name = "SUPPORTED_TYPES")
    public List<String> getSupportedFeedTypes() {
        return supportedFeedTypes;
    }

    /**
     * @param supportedFeedTypes
     *            the supported feed types to set
     */
    public void setSupportedFeedTypes(List<String> supportedFeedTypes) {
        this.supportedFeedTypes = supportedFeedTypes;
    }

    /**
     * @return the feed entries
     */
    @OneToMany(mappedBy = "feed")
    public List<FeedEntry> getEntries() {
        return entries;
    }

    /**
     * @param entries
     *            the feed entries to set
     */
    public void setEntries(List<FeedEntry> entries) {
        this.entries = entries;
    }

    /**
     * @return the user
     */
    @ManyToOne
    @JoinColumn(name = "FK_USER", nullable = false)
    public User getUser() {
        return user;
    }

    /**
     * @param user
     *            the user to set
     */
    public void setUser(User user) {
        this.user = user;
    }

    /**
     * Returns a string representation of the feed.
     *
     * <p>Null-safe with respect to {@code user}: the hibernate no-arg
     * constructor leaves {@code user} null, and the previous implementation
     * threw a NullPointerException when such an instance was printed.
     *
     * @return a string representation of the object
     */
    @Override
    public String toString() {
        return getClass().getName() + "[id=" + getId() + ", uuid=" + getUuid()
                + ", author=" + author
                + ", description=" + description
                + ", feedType=" + feedType
                + ", feedImageUrl=" + feedImageUrl
                + ", language=" + language
                + ", link=" + link
                + ", publishedDate=" + publishedDate
                + ", title=" + title
                + ", supportedFeedTypes=" + supportedFeedTypes
                + ", user.username=" + (user == null ? "null" : user.getUsername())
                + "]";
    }
}
/**
 * Copyright 2014 National University of Ireland, Galway.
 *
 * This file is part of the SIREn project. Project and contact information:
 *
 *  https://github.com/rdelbru/SIREn
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sindice.siren.search.node;

import static org.sindice.siren.analysis.MockSirenToken.node;
import static org.sindice.siren.search.AbstractTestSirenScorer.BooleanClauseBuilder.must;
import static org.sindice.siren.search.AbstractTestSirenScorer.NodeBooleanQueryBuilder.nbq;

import java.io.IOException;
import java.util.ArrayList;

import org.junit.Test;
import org.sindice.siren.analysis.AnyURIAnalyzer;
import org.sindice.siren.analysis.TupleAnalyzer;
import org.sindice.siren.analysis.AnyURIAnalyzer.URINormalisation;
import org.sindice.siren.index.DocsAndNodesIterator;
import org.sindice.siren.index.codecs.RandomSirenCodec.PostingsFormatType;
import org.sindice.siren.search.AbstractTestSirenScorer;
import org.sindice.siren.util.XSDDatatype;

/**
 * Unit tests for the node conjunction scorer: document/node iteration,
 * candidate skipping, and score monotonicity for term conjunctions.
 */
public class TestNodeConjunctionScorer extends AbstractTestSirenScorer {

  @Override
  protected void configure() throws IOException {
    this.setAnalyzer(AnalyzerType.TUPLE);
    // TODO: remove when TupleAnalyzer is no more used
    final AnyURIAnalyzer uriAnalyzer = new AnyURIAnalyzer(TEST_VERSION_CURRENT);
    uriAnalyzer.setUriNormalisation(URINormalisation.FULL);
    ((TupleAnalyzer) analyzer).registerDatatype(XSDDatatype.XSD_ANY_URI.toCharArray(), uriAnalyzer);
    this.setPostingsFormat(PostingsFormatType.RANDOM);
  }

  /**
   * A conjunction of a term with itself must match every node containing the
   * term, once per node, across all three documents.
   */
  @Test
  public void testNextWithTermConjunction() throws Exception {
    this.addDocuments(new String[] {
      "<http://renaud.delbru.fr/> . ",
      "<http://sindice.com/test/name> \"Renaud Delbru\" . ",
      "<http://sindice.com/test/type> <http://sindice.com/test/Person> . " +
      "<http://sindice.com/test/name> \"Renaud Delbru\" . "
    });

    final NodeScorer scorer = this.getScorer(
      nbq(must("renaud"), must("renaud"))
    );

    // doc 0: term occurs in node (0,0)
    assertTrue(scorer.nextCandidateDocument());
    assertEquals(0, scorer.doc());
    assertEquals(node(-1), scorer.node());
    assertTrue(scorer.nextNode());
    assertEquals(node(0,0), scorer.node());
    assertFalse(scorer.nextNode());
    assertEquals(DocsAndNodesIterator.NO_MORE_NOD, scorer.node());

    // doc 1: term occurs in node (0,1)
    assertTrue(scorer.nextCandidateDocument());
    assertEquals(1, scorer.doc());
    assertEquals(node(-1), scorer.node());
    assertTrue(scorer.nextNode());
    assertEquals(node(0,1), scorer.node());
    assertFalse(scorer.nextNode());
    assertEquals(DocsAndNodesIterator.NO_MORE_NOD, scorer.node());

    // doc 2: term occurs in node (1,1)
    assertTrue(scorer.nextCandidateDocument());
    assertEquals(2, scorer.doc());
    assertEquals(node(-1), scorer.node());
    assertTrue(scorer.nextNode());
    assertEquals(node(1,1), scorer.node());
    assertFalse(scorer.nextNode());
    assertEquals(DocsAndNodesIterator.NO_MORE_NOD, scorer.node());

    assertEndOfStream(scorer);
  }

  /**
   * Both terms occur in the document but never in the same node: the document
   * is a candidate, yet no node satisfies the conjunction.
   */
  @Test
  public void testNoNode() throws IOException {
    this.addDocument("\"eee\" . \"ddd\" . ");

    final NodeScorer scorer = this.getScorer(
      nbq(must("ddd"), must("eee"))
    );

    assertTrue(scorer.nextCandidateDocument());
    assertFalse(scorer.nextNode());
    assertEquals(DocsAndNodesIterator.NO_MORE_NOD, scorer.node());
    assertEndOfStream(scorer);
  }

  /**
   * No document contains both terms: the scorer has no candidate at all.
   */
  @Test
  public void testNoNextCandidate() throws IOException {
    this.addDocument("\"eee\" . \"ddd\" . ");
    this.addDocument("\"eee\" . \"fff\" . ");

    final NodeScorer scorer = this.getScorer(
      nbq(must("ddd"), must("fff"))
    );

    assertEndOfStream(scorer);
  }

  // TODO: To update when phrase query implemented
  // @Test
  // public void testNextWithPhraseConjunction()
  // throws Exception {
  // this.deleteAll(writer);
  // this.addDocumentsWithIterator(new String[] { "\"aaa bbb aaa\". ",
  // "\"aaa bbb aba\" \"aaa ccc bbb aaa\" . ",
  // "\"aaa bbb ccc\" \"aaa ccc aaa aaa ccc\" . " +
  // "\" bbb ccc aaa \" \"aaa bbb bbb ccc aaa ccc\" . "});
  //
  // final NodeBooleanScorer scorer =
  // this.getConjunctionScorer(new String[][] {{"aaa", "bbb"}, {"aaa", "ccc"}});
  //
  // assertFalse(scorer.nextDocument() == DocIdSetIterator.NO_MORE_DOCS);
  // assertEquals(2, scorer.doc());
  // assertEquals(1, scorer.node()[0]);
  // assertEquals(1, scorer.node()[1]);
  // assertTrue(scorer.nextDocument() == DocIdSetIterator.NO_MORE_DOCS);
  // }

  /**
   * Candidate skipping: matching documents are the even-numbered ones
   * (every odd document contains only the "type" triple).
   */
  @Test
  public void testSkipToCandidate() throws Exception {
    final ArrayList<String> docs = new ArrayList<String>();
    for (int i = 0; i < 32; i++) {
      docs.add("<http://sindice.com/test/name> \"Renaud Delbru\" . ");
      docs.add("<http://sindice.com/test/type> <http://sindice.com/test/Person> . ");
    }
    this.addDocuments(docs);

    final NodeScorer scorer = this.getScorer(
      nbq(must("renaud"), must("delbru"))
    );

    assertTrue(scorer.skipToCandidate(16));
    assertEquals(16, scorer.doc());
    assertEquals(node(-1), scorer.node());
    assertTrue(scorer.nextNode());
    assertEquals(node(0,1), scorer.node());
    assertFalse(scorer.nextNode());
    assertEquals(DocsAndNodesIterator.NO_MORE_NOD, scorer.node());

    assertTrue(scorer.skipToCandidate(41)); // should jump to next candidate doc 42
    assertEquals(42, scorer.doc());
    assertEquals(node(-1), scorer.node());
    assertTrue(scorer.skipToCandidate(42)); // should stay at the same position
    assertEquals(42, scorer.doc());
    assertEquals(node(-1), scorer.node());
    assertTrue(scorer.nextNode());
    assertEquals(node(0,1), scorer.node());
    assertFalse(scorer.nextNode());
    assertEquals(DocsAndNodesIterator.NO_MORE_NOD, scorer.node());

    assertFalse(scorer.skipToCandidate(75)); // only 64 documents indexed
    assertEndOfStream(scorer);
  }

  /**
   * The score increases, even though the frequency of each term remains the same.
   * This is due to the length of the document which gets longer.
   */
  @Test
  public void testScoreWithTermConjunction() throws Exception {
    final String[] docs = new String[] {
      "<http://renaud.delbru.fr/> . ",
      "<http://sindice.com/test/name> \"Renaud Delbru\" . ",
      "<http://sindice.com/test/type> <http://sindice.com/test/Person> . " +
      "<http://sindice.com/test/name> \"Renaud Delbru\" . ",
      "<http://sindice.com/test/type> <http://sindice.com/test/Person> . " +
      "<http://sindice.com/test/homepage> <http://renaud.delbru.fr/> . " +
      "<http://sindice.com/test/name> \"Renaud Delbru\" ."
    };
    this.addDocuments(docs);

    final LuceneProxyNodeScorer scorer =
      new LuceneProxyNodeScorer(this.getScorer(nbq(must("renaud"), must("delbru"))));

    float lastLastScore = 0;
    float lastScore = 0;

    assertTrue(scorer.nextDoc() != DocsAndNodesIterator.NO_MORE_DOC);
    lastLastScore = scorer.score();
    assertTrue(scorer.nextDoc() != DocsAndNodesIterator.NO_MORE_DOC);
    lastScore = scorer.score();
    assertTrue("doc=" + scorer.docID() + " lastScore=" + lastLastScore + " score=" + lastScore,
      lastLastScore > lastScore);

    assertTrue(scorer.nextDoc() != DocsAndNodesIterator.NO_MORE_DOC);
    lastLastScore = lastScore;
    lastScore = scorer.score();
    assertTrue("lastScore=" + lastLastScore + " score=" + lastScore, lastLastScore > lastScore);

    // NOTE: a dead assignment "lastLastScore = scorer.score();" used to sit
    // here; it was unconditionally overwritten by the shift below and has
    // been removed.
    assertTrue(scorer.nextDoc() != DocsAndNodesIterator.NO_MORE_DOC);
    lastLastScore = lastScore;
    lastScore = scorer.score();
    // score() sums the score of both nodes
    assertTrue("lastScore=" + lastLastScore + " score=" + lastScore, lastLastScore < lastScore);

    assertFalse(scorer.nextDoc() != DocsAndNodesIterator.NO_MORE_DOC);
  }

}
import java.util.Random;

/**
 * Random Utils
 * <ul>
 * Shuffling algorithm
 * <li>{@link #shuffle(Object[])} Shuffling algorithm, Randomly permutes the specified array using a default source of
 * randomness</li>
 * <li>{@link #shuffle(Object[], int)} Shuffling algorithm, Randomly permutes the specified array</li>
 * <li>{@link #shuffle(int[])} Shuffling algorithm, Randomly permutes the specified int array using a default source of
 * randomness</li>
 * <li>{@link #shuffle(int[], int)} Shuffling algorithm, Randomly permutes the specified int array</li>
 * </ul>
 * <ul>
 * get random int
 * <li>{@link #getRandom(int)} get random int between 0 and max</li>
 * <li>{@link #getRandom(int, int)} get random int between min and max</li>
 * </ul>
 * <ul>
 * get random numbers or letters
 * <li>{@link #getRandomCapitalLetters(int)} get a fixed-length random string, its a mixture of uppercase letters</li>
 * <li>{@link #getRandomLetters(int)} get a fixed-length random string, its a mixture of uppercase and lowercase letters
 * </li>
 * <li>{@link #getRandomLowerCaseLetters(int)} get a fixed-length random string, its a mixture of lowercase letters</li>
 * <li>{@link #getRandomNumbers(int)} get a fixed-length random string, its a mixture of numbers</li>
 * <li>{@link #getRandomNumbersAndLetters(int)} get a fixed-length random string, its a mixture of uppercase, lowercase
 * letters and numbers</li>
 * <li>{@link #getRandom(String, int)} get a fixed-length random string, its a mixture of chars in source</li>
 * <li>{@link #getRandom(char[], int)} get a fixed-length random string, its a mixture of chars in sourceChar</li>
 * </ul>
 *
 * @author <a href="http://www.trinea.cn" target="_blank">Trinea</a> 2012-5-12
 */
public class RandomUtils {

    public static final String NUMBERS_AND_LETTERS = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
    public static final String NUMBERS             = "0123456789";
    public static final String LETTERS             = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
    public static final String CAPITAL_LETTERS     = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
    public static final String LOWER_CASE_LETTERS  = "abcdefghijklmnopqrstuvwxyz";

    /**
     * Shared generator. Previously every call allocated a fresh {@code new Random()},
     * which is wasteful and, in tight loops, risks correlated sequences.
     * {@link Random} is safe for concurrent use.
     */
    private static final Random RANDOM = new Random();

    /** Utility class: not instantiable. */
    private RandomUtils() {
        throw new AssertionError();
    }

    /**
     * get a fixed-length random string, its a mixture of uppercase, lowercase letters and numbers
     *
     * @param length desired length
     * @return random string, or {@code null} if {@code length < 0}
     * @see RandomUtils#getRandom(String source, int length)
     */
    public static String getRandomNumbersAndLetters(int length) {
        return getRandom(NUMBERS_AND_LETTERS, length);
    }

    /**
     * get a fixed-length random string, its a mixture of numbers
     *
     * @param length desired length
     * @return random string, or {@code null} if {@code length < 0}
     * @see RandomUtils#getRandom(String source, int length)
     */
    public static String getRandomNumbers(int length) {
        return getRandom(NUMBERS, length);
    }

    /**
     * get a fixed-length random string, its a mixture of uppercase and lowercase letters
     *
     * @param length desired length
     * @return random string, or {@code null} if {@code length < 0}
     * @see RandomUtils#getRandom(String source, int length)
     */
    public static String getRandomLetters(int length) {
        return getRandom(LETTERS, length);
    }

    /**
     * get a fixed-length random string, its a mixture of uppercase letters
     *
     * @param length desired length
     * @return random string, or {@code null} if {@code length < 0}
     * @see RandomUtils#getRandom(String source, int length)
     */
    public static String getRandomCapitalLetters(int length) {
        return getRandom(CAPITAL_LETTERS, length);
    }

    /**
     * get a fixed-length random string, its a mixture of lowercase letters
     *
     * @param length desired length
     * @return random string, or {@code null} if {@code length < 0}
     * @see RandomUtils#getRandom(String source, int length)
     */
    public static String getRandomLowerCaseLetters(int length) {
        return getRandom(LOWER_CASE_LETTERS, length);
    }

    /**
     * get a fixed-length random string, its a mixture of chars in source
     *
     * @param source pool of characters to draw from
     * @param length desired length
     * @return <ul>
     *         <li>if source is null or empty, return null</li>
     *         <li>else see {@link RandomUtils#getRandom(char[] sourceChar, int length)}</li>
     *         </ul>
     */
    public static String getRandom(String source, int length) {
        // Inline null/empty check (was StringUtils.isEmpty) keeps this class
        // free of non-JDK dependencies; behavior is identical.
        return (source == null || source.length() == 0) ? null : getRandom(source.toCharArray(), length);
    }

    /**
     * get a fixed-length random string, its a mixture of chars in sourceChar
     *
     * @param sourceChar pool of characters to draw from
     * @param length desired length
     * @return <ul>
     *         <li>if sourceChar is null or empty, return null</li>
     *         <li>if length less than 0, return null</li>
     *         </ul>
     */
    public static String getRandom(char[] sourceChar, int length) {
        if (sourceChar == null || sourceChar.length == 0 || length < 0) {
            return null;
        }

        StringBuilder str = new StringBuilder(length);
        for (int i = 0; i < length; i++) {
            str.append(sourceChar[RANDOM.nextInt(sourceChar.length)]);
        }
        return str.toString();
    }

    /**
     * get random int between 0 and max
     *
     * @param max exclusive upper bound
     * @return <ul>
     *         <li>if max <= 0, return 0</li>
     *         <li>else return random int in [0, max)</li>
     *         </ul>
     */
    public static int getRandom(int max) {
        return getRandom(0, max);
    }

    /**
     * get random int between min and max
     *
     * @param min inclusive lower bound
     * @param max exclusive upper bound
     * @return <ul>
     *         <li>if min > max, return 0</li>
     *         <li>if min == max, return min</li>
     *         <li>else return random int in [min, max)</li>
     *         </ul>
     */
    public static int getRandom(int min, int max) {
        if (min > max) {
            return 0;
        }
        if (min == max) {
            return min;
        }
        return min + RANDOM.nextInt(max - min);
    }

    /**
     * Shuffling algorithm, Randomly permutes the specified array using a default source of randomness.
     * The shuffle count itself is chosen at random in [0, objArray.length).
     *
     * @param objArray array to permute in place
     * @return false if objArray is null, true otherwise
     */
    public static boolean shuffle(Object[] objArray) {
        if (objArray == null) {
            return false;
        }
        return shuffle(objArray, getRandom(objArray.length));
    }

    /**
     * Shuffling algorithm, Randomly permutes the last shuffleCount positions of the specified array
     * (partial Fisher-Yates, working from the tail).
     *
     * @param objArray array to permute in place
     * @param shuffleCount number of tail positions to fill with random picks
     * @return false on null array or out-of-range shuffleCount, true otherwise
     */
    public static boolean shuffle(Object[] objArray, int shuffleCount) {
        int length;
        if (objArray == null || shuffleCount < 0 || (length = objArray.length) < shuffleCount) {
            return false;
        }

        for (int i = 1; i <= shuffleCount; i++) {
            int random = getRandom(length - i);
            Object temp = objArray[length - i];
            objArray[length - i] = objArray[random];
            objArray[random] = temp;
        }
        return true;
    }

    /**
     * Shuffling algorithm, Randomly permutes the specified int array using a default source of randomness.
     * The shuffle count itself is chosen at random in [0, intArray.length).
     *
     * @param intArray array to permute in place
     * @return the randomly drawn elements, or null if intArray is null
     */
    public static int[] shuffle(int[] intArray) {
        if (intArray == null) {
            return null;
        }
        return shuffle(intArray, getRandom(intArray.length));
    }

    /**
     * Shuffling algorithm, Randomly permutes the specified int array and returns the drawn elements.
     *
     * @param intArray array to permute in place
     * @param shuffleCount number of elements to draw
     * @return array of the shuffleCount drawn elements, or null on invalid input
     */
    public static int[] shuffle(int[] intArray, int shuffleCount) {
        int length;
        if (intArray == null || shuffleCount < 0 || (length = intArray.length) < shuffleCount) {
            return null;
        }

        int[] out = new int[shuffleCount];
        for (int i = 1; i <= shuffleCount; i++) {
            int random = getRandom(length - i);
            out[i - 1] = intArray[random];
            int temp = intArray[length - i];
            intArray[length - i] = intArray[random];
            intArray[random] = temp;
        }
        return out;
    }
}
/*
 * Copyright 2000-2008, Atomikos (http://www.atomikos.com)
 *
 * This code ("Atomikos TransactionsEssentials"), by itself,
 * is being distributed under the
 * Apache License, Version 2.0 ("License"), a copy of which may be found at
 * http://www.atomikos.com/licenses/apache-license-2.0.txt .
 * You may not use this file except in compliance with the License.
 *
 * While the License grants certain patent license rights,
 * those patent license rights only extend to the use of
 * Atomikos TransactionsEssentials by itself.
 *
 * This code (Atomikos TransactionsEssentials) contains certain interfaces
 * in package (namespace) com.atomikos.icatch
 * (including com.atomikos.icatch.Participant) which, if implemented, may
 * infringe one or more patents held by Atomikos.
 * It should be appreciated that you may NOT implement such interfaces;
 * licensing to implement these interfaces must be obtained separately from Atomikos.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *
 */
package com.atomikos.icatch.imp;

import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Stack;
import java.util.Vector;

import com.atomikos.icatch.HeurCommitException;
import com.atomikos.icatch.HeurHazardException;
import com.atomikos.icatch.HeurMixedException;
import com.atomikos.icatch.HeurRollbackException;
import com.atomikos.icatch.HeuristicMessage;
import com.atomikos.icatch.Participant;
import com.atomikos.icatch.RollbackException;
import com.atomikos.icatch.SysException;
import com.atomikos.icatch.TxState;

/**
 * A state handler for the heuristic hazard coordinator state.
 *
 * While in this state, 2PC termination primitives (prepare/commit/rollback)
 * are refused with {@link HeurHazardException}; the handler instead keeps
 * retrying outcome notification of the in-doubt ("hazard") participants on
 * each timeout, and transitions to the terminated state once none remain.
 */
class HeurHazardStateHandler extends CoordinatorStateHandler
{
    // Participants whose outcome is still unknown (the "hazards").
    // Legacy Vector is kept for serialization/log compatibility with the
    // rest of this package.
    private Vector hazards_;

    HeurHazardStateHandler ( CoordinatorImp coordinator )
    {
        super ( coordinator );
        hazards_ = new Vector ();
    }

    HeurHazardStateHandler ( CoordinatorStateHandler previous , Vector hazards )
    {
        super ( previous );
        // defensive copy: the caller may keep mutating its own list
        hazards_ = (Vector) hazards.clone ();
    }

    HeurHazardStateHandler ( CoordinatorStateHandler previous ,
            Hashtable hazards )
    {
        super ( previous );
        // only the participants (keys) matter here, not the mapped values
        hazards_ = new Vector ();
        hazards_.addAll ( hazards.keySet () );
    }

    protected void recover ( CoordinatorImp coordinator )
    {
        super.recover ( coordinator );

        // add all recovered participants to the replay stack
        // to resume where we left off before the crash,
        // and try to notify all hazards
        Enumeration enumm = getCoordinator ().getParticipants ().elements ();
        while ( enumm.hasMoreElements () ) {
            Participant p = (Participant) enumm.nextElement ();
            // read-only participants never need outcome notification
            if ( !getReadOnlyTable ().containsKey ( p ) ) {
                replayCompletion ( p );
            }
        } // while
    }

    protected Object getState ()
    {
        return TxState.HEUR_HAZARD;
    }

    /**
     * Timeout-driven retry loop: re-notifies every hazard participant of the
     * global outcome and prunes those that acknowledge; terminates the
     * coordinator once the hazard list is empty.
     */
    protected void onTimeout ()
    {
        // this state can only be reached through COMMITTING or ABORTING
        // so getCommitted can not be null
        boolean committed = getCommitted ().booleanValue ();

        addAllForReplay ( hazards_ );

        // get Stack to avoid overwriting effects of
        // intermediate recovery calls
        Stack replayStack = getReplayStack ();
        boolean replay = false;
        if ( !replayStack.empty () ) {
            replay = true;
            int count = replayStack.size ();
            TerminationResult result = new TerminationResult ( count );

            while ( !replayStack.empty () ) {
                Participant part = (Participant) replayStack.pop ();
                if ( committed ) {
                    CommitMessage cm = new CommitMessage ( part, result, false );
                    getPropagator ().submitPropagationMessage ( cm );
                } else {
                    RollbackMessage rm = new RollbackMessage ( part, result,
                            true );
                    getPropagator ().submitPropagationMessage ( rm );
                }
            }
            try {
                result.waitForReplies ();

                // remove OK replies from hazards_ list and change state if
                // hazard_ is empty.

                Stack replies = result.getReplies ();
                Enumeration enumm = replies.elements ();
                while ( enumm.hasMoreElements () ) {
                    Reply reply = (Reply) enumm.nextElement ();

                    if ( !reply.hasFailed () ) {
                        hazards_.remove ( reply.getParticipant () );
                    }
                }

                // TODO if overall result failed: check if heuristic state
                // should change?
                // for instance: if mixed replies -> change state to HEURMIXED
                // NOTE: this can happen on recovery with late registration
                // where the resource has ended in mixed mode

            } catch ( InterruptedException inter ) {
                // return silently;
                // worst case is some remaining indoubt participants
                // NOTE(review): the interrupt flag is not restored here
                // (Thread.currentThread().interrupt()) — confirm whether the
                // surrounding timer thread relies on it.
            }

        }

        if ( hazards_.isEmpty () ) {
            // all participants acknowledged: the coordinator is done
            TerminatedStateHandler termStateHandler = new TerminatedStateHandler (
                    this );
            getCoordinator ().setStateHandler ( termStateHandler );
        } else if ( replay ) {
            // set state to heuristic again, to
            // notify logging of swapout.
            // note: only do this if something could have changed such as in replay
            getCoordinator ().setStateHandler ( this );
        }

    }

    protected void setGlobalSiblingCount ( int count )
    {
        // nothing to do here
    }

    // Heuristic hazard: 2PC can no longer proceed normally — every
    // termination primitive below reports the hazard to the caller.

    protected int prepare () throws RollbackException,
            java.lang.IllegalStateException, HeurHazardException,
            HeurMixedException, SysException
    {
        throw new HeurHazardException ( getHeuristicMessages () );
    }

    protected HeuristicMessage[] commit ( boolean onePhase )
            throws HeurRollbackException, HeurMixedException,
            HeurHazardException, java.lang.IllegalStateException,
            RollbackException, SysException
    {
        throw new HeurHazardException ( getHeuristicMessages () );
    }

    protected HeuristicMessage[] rollback () throws HeurCommitException,
            HeurMixedException, SysException, HeurHazardException,
            java.lang.IllegalStateException
    {
        throw new HeurHazardException ( getHeuristicMessages () );
    }
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.theoryinpractice.testng.inspection;

import com.intellij.codeInspection.*;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileChooser.FileChooser;
import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.search.PsiSearchHelper;
import com.intellij.psi.util.PsiClassUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.xml.XmlAttribute;
import com.intellij.psi.xml.XmlFile;
import com.intellij.psi.xml.XmlTag;
import com.intellij.util.IncorrectOperationException;
import com.theoryinpractice.testng.configuration.browser.SuiteBrowser;
import com.theoryinpractice.testng.util.TestNGUtil;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Inspection that flags runnable TestNG test classes which are not declared in
 * any testng.xml suite, and offers two quick fixes: register the class in an
 * existing suite, or create a new testng.xml.
 */
public class UndeclaredTestInspection extends BaseJavaLocalInspectionTool {
  private static final Logger LOG = Logger.getInstance(UndeclaredTestInspection.class);

  @Nls
  @NotNull
  public String getGroupDisplayName() {
    return TestNGUtil.TESTNG_GROUP_NAME;
  }

  @Nls
  @NotNull
  public String getDisplayName() {
    return "Undeclared test";
  }

  @NonNls
  @NotNull
  public String getShortName() {
    return "UndeclaredTests";
  }

  /**
   * Reports the class when neither its qualified name nor any enclosing
   * package (via a matching {@code <package name="...">} tag) is referenced
   * from a testng.xml file in the TestNG search scope.
   */
  @Nullable
  public ProblemDescriptor[] checkClass(@NotNull final PsiClass aClass,
                                        @NotNull final InspectionManager manager,
                                        final boolean isOnTheFly) {
    if (TestNGUtil.hasTest(aClass) && PsiClassUtil.isRunnableClass(aClass, true)) {
      final Project project = aClass.getProject();
      final String qName = aClass.getQualifiedName();
      if (qName == null) return null;
      final String packageQName = StringUtil.getPackageName(qName);
      // Collect every package prefix of the qualified name plus the name
      // itself, most specific first (reversed), so the full class name is
      // searched before its packages.
      final List<String> names = new ArrayList<>();
      for(int i = 0; i < qName.length(); i++) {
        if (qName.charAt(i) == '.') {
          names.add(qName.substring(0, i));
        }
      }
      names.add(qName);
      Collections.reverse(names);
      for (final String name : names) {
        final boolean isFullName = qName.equals(name);
        // single-element array: lets the lambda below write through an
        // effectively-final reference
        final boolean[] found = new boolean[]{false};
        PsiSearchHelper.SERVICE.getInstance(project)
          .processUsagesInNonJavaFiles(name, (file, startOffset, endOffset) -> {
            if (file.findReferenceAt(startOffset) != null) {
              if (!isFullName) { //special package tag required
                final XmlTag tag = PsiTreeUtil.getParentOfType(file.findElementAt(startOffset), XmlTag.class);
                if (tag == null || !tag.getName().equals("package")) {
                  return true;
                }
                final XmlAttribute attribute = tag.getAttribute("name");
                if (attribute == null) return true;
                final String value = attribute.getValue();
                if (value == null) return true;
                // the package declaration must cover this class: either a
                // wildcard ("foo.*") or the exact package name
                if (!value.endsWith(".*") && !value.equals(packageQName)) return true;
              }
              found[0] = true;
              return false; // stop processing: a declaration was found
            }
            return true;
          }, new TestNGSearchScope(project));

        if (found[0]) return null; // declared somewhere — nothing to report
      }
      final PsiIdentifier nameIdentifier = aClass.getNameIdentifier();
      LOG.assertTrue(nameIdentifier != null);
      return new ProblemDescriptor[]{manager.createProblemDescriptor(nameIdentifier, "Undeclared test \'" +
                                                                                     aClass.getName() + "\'",
                                                                     isOnTheFly, new LocalQuickFix[]{new RegisterClassFix(aClass),
                                                                       new CreateTestngFix()},
                                                                     ProblemHighlightType.GENERIC_ERROR_OR_WARNING)};
    }
    return null;
  }

  /**
   * Quick fix: let the user pick an existing testng.xml (via SuiteBrowser)
   * and register the class in it.
   */
  private static class RegisterClassFix implements LocalQuickFix {
    private final String myClassName;

    public RegisterClassFix(final PsiClass aClass) {
      myClassName = aClass.getName();
    }

    @NotNull
    public String getName() {
      return "Register \'" + myClassName + "\'";
    }

    @NotNull
    public String getFamilyName() {
      return "Register test";
    }

    public void applyFix(@NotNull final Project project, @NotNull ProblemDescriptor descriptor) {
      final PsiClass psiClass = PsiTreeUtil.getParentOfType(descriptor.getPsiElement(), PsiClass.class);
      LOG.assertTrue(psiClass != null);
      // showDialog() opens a modal chooser; null means the user cancelled
      final String testngXmlPath = new SuiteBrowser(project).showDialog();
      if (testngXmlPath == null) return;
      final VirtualFile virtualFile = LocalFileSystem.getInstance().findFileByPath(testngXmlPath);
      LOG.assertTrue(virtualFile != null);
      final PsiFile psiFile = PsiManager.getInstance(project).findFile(virtualFile);
      LOG.assertTrue(psiFile instanceof XmlFile);
      final XmlFile testngXML = (XmlFile)psiFile;
      // PSI mutation must happen inside a write command action
      new WriteCommandAction(project, getName(), testngXML) {
        protected void run(@NotNull final Result result) throws Throwable {
          patchTestngXml(testngXML, psiClass);
        }
      }.execute();
    }

    @Override
    public boolean startInWriteAction() {
      // the fix manages its own write action (after the modal dialog)
      return false;
    }
  }

  //make public for tests only
  /**
   * Adds a {@code <class name="..."/>} entry for {@code psiClass} under
   * {@code <suite>/<test>/<classes>}, creating the intermediate tags when
   * they are missing.
   */
  public static void patchTestngXml(final XmlFile testngXML, final PsiClass psiClass) {
    final XmlTag rootTag = testngXML.getDocument().getRootTag();
    if (rootTag != null && rootTag.getName().equals("suite")) {
      try {
        XmlTag testTag = rootTag.findFirstSubTag("test");
        if (testTag == null) {
          testTag = (XmlTag)rootTag.add(rootTag.createChildTag("test", rootTag.getNamespace(), null, false));
          testTag.setAttribute("name", psiClass.getName());
        }
        XmlTag classesTag = testTag.findFirstSubTag("classes");
        if (classesTag == null) {
          classesTag = (XmlTag)testTag.add(testTag.createChildTag("classes", testTag.getNamespace(), null, false));
        }
        final XmlTag classTag = (XmlTag)classesTag.add(classesTag.createChildTag("class", classesTag.getNamespace(), null, false));
        final String qualifiedName = psiClass.getQualifiedName();
        LOG.assertTrue(qualifiedName != null);
        classTag.setAttribute("name", qualifiedName);
      }
      catch (IncorrectOperationException e) {
        LOG.error(e);
      }
    }
  }

  /**
   * Quick fix: create a fresh testng.xml in a user-chosen directory and
   * register the class in it.
   */
  private static class CreateTestngFix implements LocalQuickFix {
    @NotNull
    public String getFamilyName() {
      return "Create suite";
    }

    public void applyFix(@NotNull final Project project, @NotNull final ProblemDescriptor descriptor) {
      final PsiClass psiClass = PsiTreeUtil.getParentOfType(descriptor.getPsiElement(), PsiClass.class);
      final VirtualFile file = FileChooser.chooseFile(FileChooserDescriptorFactory.createSingleFolderDescriptor(), project, null);
      if (file != null) {
        final PsiManager psiManager = PsiManager.getInstance(project);
        final PsiDirectory directory = psiManager.findDirectory(file);
        LOG.assertTrue(directory != null);
        // NOTE(review): getName() here resolves through LocalQuickFix's
        // default, which delegates to getFamilyName() — confirm against the
        // platform API version in use.
        new WriteCommandAction(project, getName(), null) {
          protected void run(@NotNull final Result result) throws Throwable {
            // minimal valid suite skeleton; the class entry is patched in below
            XmlFile testngXml = (XmlFile)PsiFileFactory.getInstance(psiManager.getProject())
              .createFileFromText("testng.xml", "<!DOCTYPE suite SYSTEM \"http://testng.org/testng-1.0.dtd\">\n<suite></suite>");
            try {
              testngXml = (XmlFile)directory.add(testngXml);
            }
            catch (IncorrectOperationException e) {
              //todo suggest new name
              return;
            }
            patchTestngXml(testngXml, psiClass);
          }
        }.execute();
      }
    }

    @Override
    public boolean startInWriteAction() {
      // write action is taken explicitly after the file chooser dialog
      return false;
    }
  }
}
package org.minperf.hem;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.RandomAccessFile;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.TreeMap;
import java.util.Vector;
import java.util.concurrent.ConcurrentMap;

/**
 * A file-backed string map with inter-process coordination via {@link FileLock}.
 * <p>
 * Every read operation re-loads the whole backing file under a shared lock;
 * every write operation loads the file under an exclusive lock, applies the
 * mutation to the in-memory {@link TreeMap}, and writes the file back as a
 * sorted {@link Properties} file before releasing the lock.
 * <p>
 * Fixes relative to the previous revision:
 * <ul>
 *   <li>{@link #read()} and {@link #lockForWriting()} now clear the cache
 *       before re-loading, so entries removed by another process no longer
 *       linger as stale values.</li>
 *   <li>{@link #writeAndUnlock()} releases the lock and closes the channel in
 *       a {@code finally} block, so an I/O failure during store no longer
 *       leaves the file locked.</li>
 *   <li>{@code main} no longer depends on test-scope JUnit; it performs the
 *       same equality check and throws {@link AssertionError} on mismatch.</li>
 * </ul>
 * Note: this class is NOT safe for concurrent use from multiple threads of
 * the same JVM (the {@code channel}/{@code lock} fields are unsynchronized
 * shared state); it only coordinates between processes.
 */
public class MetaFile implements ConcurrentMap<String, String> {

    /** Backing file holding the serialized properties. */
    private final File file;

    /** In-memory snapshot of the file contents. */
    private final TreeMap<String, String> cache = new TreeMap<String, String>();

    /** Channel held between {@link #lockForWriting()} and {@link #writeAndUnlock()}. */
    private FileChannel channel;

    /** Exclusive lock held between {@link #lockForWriting()} and {@link #writeAndUnlock()}. */
    private FileLock lock;

    /**
     * @param fileName path of the backing file; created on first write if absent.
     */
    public MetaFile(String fileName) {
        this.file = new File(fileName);
    }

    /**
     * Smoke test: writes an entry through one instance and reads it back
     * through a second instance bound to the same file.
     *
     * @param args args[0] is the backing file name.
     */
    public static void main(String... args) {
        String fileName = args[0];
        MetaFile map = new MetaFile(fileName);
        MetaFile map2 = new MetaFile(fileName);
        map.clear();
        map.put("hello", "world");
        // Equivalent to the former Assert.assertEquals, without a JUnit dependency.
        if (!"world".equals(map2.get("hello"))) {
            throw new AssertionError("expected \"world\" for key \"hello\"");
        }
    }

    @Override
    public int size() {
        read();
        return cache.size();
    }

    @Override
    public boolean isEmpty() {
        read();
        return cache.isEmpty();
    }

    @Override
    public boolean containsKey(Object key) {
        read();
        return cache.containsKey(key);
    }

    @Override
    public boolean containsValue(Object value) {
        read();
        return cache.containsValue(value);
    }

    @Override
    public Collection<String> values() {
        read();
        return cache.values();
    }

    @Override
    public Set<String> keySet() {
        read();
        return cache.keySet();
    }

    @Override
    public Set<Entry<String, String>> entrySet() {
        read();
        return cache.entrySet();
    }

    @Override
    public String get(Object key) {
        read();
        return cache.get(key);
    }

    /**
     * Re-loads the cache from the file while holding a shared (read) lock.
     * The cache is cleared first so that keys deleted by other writers do not
     * survive the reload (previously stale entries were kept forever).
     */
    private void read() {
        try (FileInputStream in = new FileInputStream(file)) {
            FileChannel readChannel = in.getChannel();
            try (FileLock readLock = readChannel.lock(0, Long.MAX_VALUE, true)) {
                Properties prop = new Properties();
                prop.load(in);
                cache.clear();
                for (Entry<Object, Object> e : prop.entrySet()) {
                    cache.put(e.getKey().toString(), e.getValue().toString());
                }
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Acquires an exclusive lock on the file and refreshes the cache from its
     * current contents. Must be paired with {@link #writeAndUnlock()}.
     * The channel stays open on purpose (closing it would drop the lock).
     */
    @SuppressWarnings("resource")
    private void lockForWriting() {
        try {
            channel = new RandomAccessFile(file, "rw").getChannel();
            lock = channel.lock();
            InputStream in = Channels.newInputStream(channel);
            Properties prop = new Properties();
            prop.load(in);
            cache.clear();
            for (Entry<Object, Object> e : prop.entrySet()) {
                cache.put(e.getKey().toString(), e.getValue().toString());
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Writes the cache back to the file (keys sorted for stable diffs),
     * truncates any leftover tail, then releases the exclusive lock.
     * Release/close now happens in a {@code finally} block so a failed store
     * cannot leave the file locked for other processes.
     */
    private void writeAndUnlock() {
        try {
            channel.position(0);
            OutputStream out = Channels.newOutputStream(channel);
            // Properties writes entries in keys() order; override it to emit
            // keys sorted alphabetically instead of hash order.
            Properties prop = new Properties() {
                private static final long serialVersionUID = 1L;
                @Override
                public synchronized Enumeration<Object> keys() {
                    Vector<String> v = new Vector<String>();
                    for (Object o : keySet()) {
                        v.add(o.toString());
                    }
                    Collections.sort(v);
                    return new Vector<Object>(v).elements();
                }
            };
            prop.putAll(cache);
            prop.store(out, null);
            out.flush();
            channel.truncate(channel.position());
        } catch (IOException e) {
            throw new RuntimeException(e);
        } finally {
            try {
                lock.release();
                channel.close();
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    }

    @Override
    public void clear() {
        lockForWriting();
        try {
            cache.clear();
        } finally {
            writeAndUnlock();
        }
    }

    @Override
    public String remove(Object key) {
        lockForWriting();
        try {
            return cache.remove(key);
        } finally {
            writeAndUnlock();
        }
    }

    @Override
    public String put(String key, String value) {
        lockForWriting();
        try {
            return cache.put(key, value);
        } finally {
            writeAndUnlock();
        }
    }

    @Override
    public String putIfAbsent(String key, String value) {
        lockForWriting();
        try {
            return cache.putIfAbsent(key, value);
        } finally {
            writeAndUnlock();
        }
    }

    @Override
    public void putAll(Map<? extends String, ? extends String> map) {
        lockForWriting();
        try {
            cache.putAll(map);
        } finally {
            writeAndUnlock();
        }
    }

    @Override
    public boolean remove(Object key, Object value) {
        lockForWriting();
        try {
            return cache.remove(key, value);
        } finally {
            writeAndUnlock();
        }
    }

    @Override
    public boolean replace(String key, String oldValue, String newValue) {
        lockForWriting();
        try {
            return cache.replace(key, oldValue, newValue);
        } finally {
            writeAndUnlock();
        }
    }

    @Override
    public String replace(String key, String value) {
        lockForWriting();
        try {
            return cache.replace(key, value);
        } finally {
            writeAndUnlock();
        }
    }
}
//========================================================================
//Copyright 2012 David Yu
//------------------------------------------------------------------------
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//http://www.apache.org/licenses/LICENSE-2.0
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//========================================================================

package com.dyuproject.protostuff.runtime;

import com.dyuproject.protostuff.AbstractTest;
import com.dyuproject.protostuff.ProtostuffIOUtil;
import com.dyuproject.protostuff.Schema;

/**
 * Tests for {@link RuntimeView}.
 *
 * Each test serializes the fixed {@code BAZ} message through a view schema
 * (exclude / include / predicate based) and asserts the serialized length,
 * using the precomputed *_LEN constants below. The same expected length is
 * checked through several equivalent view constructions (e.g. excluding one
 * field vs including the other two).
 *
 * @author David Yu
 * @created Nov 9, 2012
 */
public class RuntimeViewTest extends AbstractTest {

    // Fixture message:           id    name   timestamp
    static final Baz BAZ = newBaz( 128, "baz", 0);

    // Expected serialized size of BAZ with all three fields present.
    // Per-field breakdown: id = key + 2-byte varint, name = key + length + 3 chars,
    // timestamp = key + 1-byte varint.
    static final int EXPECT_BAZ_LEN = 1+2 + 1+1+3 + 1+1;

    // Serialized size contributed by each individual field of BAZ.
    static final int ID_LEN = 3;
    static final int NAME_LEN = 5;
    static final int TIMESTAMP_LEN = 2;

    // Expected sizes when exactly one field is filtered out of the view.
    static final int WITHOUT_ID_LEN = EXPECT_BAZ_LEN - ID_LEN;
    static final int WITHOUT_NAME_LEN = EXPECT_BAZ_LEN - NAME_LEN;
    static final int WITHOUT_TIMESTAMP_LEN = EXPECT_BAZ_LEN - TIMESTAMP_LEN;

    // Field numbers of Baz as strings, for the predicate-based factories
    // (which address fields by number rather than by name).
    static final String STR_FN_ID = "1";
    static final String STR_FN_NAME = "2";
    static final String STR_FN_TIMESTAMP = "3";

    /** Builds a {@link Baz} with all three fields set. */
    static Baz newBaz(int id, String name, long timestamp) {
        Baz message = new Baz();
        message.setId(id);
        message.setName(name);
        message.setTimestamp(timestamp);
        return message;
    }

    /** Serializes the shared {@code BAZ} fixture through the given schema. */
    static byte[] ser(Schema<Baz> schema) {
        return ProtostuffIOUtil.toByteArray(BAZ, schema, buf());
    }

    /** Serialized length of {@code BAZ} under the given schema. */
    static int len(Schema<Baz> schema) {
        return ser(schema).length;
    }

    /** The full runtime schema for {@link Baz} (no view applied). */
    static RuntimeSchema<Baz> rs() {
        return (RuntimeSchema<Baz>)RuntimeSchema.getSchema(Baz.class);
    }

    /** View excluding the named fields. */
    static Schema<Baz> ex1(String ... args) {
        return RuntimeView.createFrom(rs(), RuntimeView.Factories.EXCLUDE,
                null, args);
    }

    /** View excluding the named fields (merge-optimized variant). */
    static Schema<Baz> ex2(String ... args) {
        return RuntimeView.createFrom(rs(), RuntimeView.Factories.EXCLUDE_OPTIMIZED_FOR_MERGE_ONLY,
                null, args);
    }

    /** View including only the named fields. */
    static Schema<Baz> in1(String ... args) {
        return RuntimeView.createFrom(rs(), RuntimeView.Factories.INCLUDE,
                null, args);
    }

    /** View including only the named fields (merge-optimized variant). */
    static Schema<Baz> in2(String ... args) {
        return RuntimeView.createFrom(rs(), RuntimeView.Factories.INCLUDE_OPTIMIZED_FOR_MERGE_ONLY,
                null, args);
    }

    /** Predicate view: keep fields whose number equals the argument. */
    static Schema<Baz> EQ(String ... args) {
        return RuntimeView.createFrom(rs(), RuntimeView.Factories.PREDICATE,
                Predicate.Factories.EQ, args);
    }

    /** Predicate view: keep fields whose number differs from the argument. */
    static Schema<Baz> NOTEQ(String ... args) {
        return RuntimeView.createFrom(rs(), RuntimeView.Factories.PREDICATE,
                Predicate.Factories.NOTEQ, args);
    }

    /** Predicate view: keep fields whose number is greater than the argument. */
    static Schema<Baz> GT(String ... args) {
        return RuntimeView.createFrom(rs(), RuntimeView.Factories.PREDICATE,
                Predicate.Factories.GT, args);
    }

    /** Predicate view: keep fields whose number is less than the argument. */
    static Schema<Baz> LT(String ... args) {
        return RuntimeView.createFrom(rs(), RuntimeView.Factories.PREDICATE,
                Predicate.Factories.LT, args);
    }

    /** Predicate view: keep fields whose number is within [args[0], args[1]]. */
    static Schema<Baz> RANGE(String ... args) {
        return RuntimeView.createFrom(rs(), RuntimeView.Factories.PREDICATE,
                Predicate.Factories.RANGE, args);
    }

    /** Predicate view: keep fields whose number is outside [args[0], args[1]]. */
    static Schema<Baz> NOTRANGE(String ... args) {
        return RuntimeView.createFrom(rs(), RuntimeView.Factories.PREDICATE,
                Predicate.Factories.NOTRANGE, args);
    }

    // tests

    /** Sanity check: the unfiltered schema produces the full expected length. */
    public void testLen() {
        assertEquals(EXPECT_BAZ_LEN, len(rs()));
    }

    /** All view constructions that drop only "id" agree on the length. */
    public void testExcludeBazId() {
        assertEquals(WITHOUT_ID_LEN, len(ex1("id")));
        assertEquals(WITHOUT_ID_LEN, len(ex2("id")));
        assertEquals(WITHOUT_ID_LEN, len(in1("name", "timestamp")));
        assertEquals(WITHOUT_ID_LEN, len(in2("name", "timestamp")));
        assertEquals(WITHOUT_ID_LEN, len(NOTEQ(STR_FN_ID)));
        assertEquals(WITHOUT_ID_LEN, len(GT(STR_FN_ID)));
        assertEquals(WITHOUT_ID_LEN, len(RANGE(STR_FN_NAME, STR_FN_TIMESTAMP)));
    }

    /** All view constructions that drop only "name" agree on the length. */
    public void testExcludeBazName() {
        assertEquals(WITHOUT_NAME_LEN, len(ex1("name")));
        assertEquals(WITHOUT_NAME_LEN, len(ex2("name")));
        assertEquals(WITHOUT_NAME_LEN, len(in1("id", "timestamp")));
        assertEquals(WITHOUT_NAME_LEN, len(in2("id", "timestamp")));
        assertEquals(WITHOUT_NAME_LEN, len(NOTEQ(STR_FN_NAME)));
        assertEquals(WITHOUT_NAME_LEN, len(NOTRANGE(STR_FN_NAME, STR_FN_NAME)));
    }

    /** All view constructions that drop only "timestamp" agree on the length. */
    public void testExcludeBazTimestamp() {
        assertEquals(WITHOUT_TIMESTAMP_LEN, len(ex1("timestamp")));
        assertEquals(WITHOUT_TIMESTAMP_LEN, len(ex2("timestamp")));
        assertEquals(WITHOUT_TIMESTAMP_LEN, len(in1("id", "name")));
        assertEquals(WITHOUT_TIMESTAMP_LEN, len(in2("id", "name")));
        assertEquals(WITHOUT_TIMESTAMP_LEN, len(NOTEQ(STR_FN_TIMESTAMP)));
        assertEquals(WITHOUT_TIMESTAMP_LEN, len(LT(STR_FN_TIMESTAMP)));
        assertEquals(WITHOUT_TIMESTAMP_LEN, len(RANGE(STR_FN_ID, STR_FN_NAME)));
    }

    /** All view constructions that keep only "id" agree on the length. */
    public void testIncludeOnlyBazId() {
        assertEquals(ID_LEN, len(ex1("name", "timestamp")));
        assertEquals(ID_LEN, len(ex2("name", "timestamp")));
        assertEquals(ID_LEN, len(in1("id")));
        assertEquals(ID_LEN, len(in2("id")));
        assertEquals(ID_LEN, len(EQ(STR_FN_ID)));
        assertEquals(ID_LEN, len(LT(STR_FN_NAME)));
        assertEquals(ID_LEN, len(RANGE(STR_FN_ID, STR_FN_ID)));
    }

    /** All view constructions that keep only "name" agree on the length. */
    public void testIncludeOnlyBazName() {
        assertEquals(NAME_LEN, len(ex1("id", "timestamp")));
        assertEquals(NAME_LEN, len(ex2("id", "timestamp")));
        assertEquals(NAME_LEN, len(in1("name")));
        assertEquals(NAME_LEN, len(in2("name")));
        assertEquals(NAME_LEN, len(EQ(STR_FN_NAME)));
        assertEquals(NAME_LEN, len(RANGE(STR_FN_NAME, STR_FN_NAME)));
    }

    /** All view constructions that keep only "timestamp" agree on the length. */
    public void testIncludeOnlyBazTimestamp() {
        assertEquals(TIMESTAMP_LEN, len(ex1("id", "name")));
        assertEquals(TIMESTAMP_LEN, len(ex2("id", "name")));
        assertEquals(TIMESTAMP_LEN, len(in1("timestamp")));
        assertEquals(TIMESTAMP_LEN, len(in2("timestamp")));
        assertEquals(TIMESTAMP_LEN, len(EQ(STR_FN_TIMESTAMP)));
        assertEquals(TIMESTAMP_LEN, len(GT(STR_FN_NAME)));
        assertEquals(TIMESTAMP_LEN, len(RANGE(STR_FN_TIMESTAMP, STR_FN_TIMESTAMP)));
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.binary;

import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Proxy;
import java.math.BigDecimal;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.UUID;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.binary.BinaryObjectException;
import org.apache.ignite.binary.BinaryReflectiveSerializer;
import org.apache.ignite.binary.BinarySerializer;
import org.apache.ignite.binary.Binarylizable;
import org.apache.ignite.internal.processors.cache.CacheObjectImpl;
import org.apache.ignite.internal.processors.query.QueryUtils;
import org.apache.ignite.internal.util.GridUnsafe;
import org.apache.ignite.internal.util.tostring.GridToStringExclude;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.marshaller.MarshallerExclusions;
import org.apache.ignite.marshaller.optimized.OptimizedMarshaller;
import org.jetbrains.annotations.Nullable;

import static org.apache.ignite.internal.processors.query.QueryUtils.isGeometryClass;

/**
 * Binary class descriptor.
 *
 * Immutable (except {@link #stableSchemaPublished}) per-class metadata computed
 * once from reflection: the {@link BinaryWriteMode}, the field accessors, the
 * stable schema, and write-replace / read-resolve hooks. The {@link #write}
 * and {@link #read} methods dispatch on the precomputed mode.
 */
public class BinaryClassDescriptor {
    /** Binary context this descriptor belongs to. */
    @GridToStringExclude
    private final BinaryContext ctx;

    /** Described class. */
    private final Class<?> cls;

    /** Configured serializer. */
    private final BinarySerializer serializer;

    /** Serializer that is passed during BinaryClassDescriptor construction. Can differ from {@link #serializer}. */
    private final BinarySerializer initialSerializer;

    /** ID mapper. */
    private final BinaryInternalMapper mapper;

    /** Precomputed write mode driving the switch in {@link #write} and {@link #read}. */
    private final BinaryWriteMode mode;

    /** User type flag. */
    private final boolean userType;

    /** Type ID. */
    private final int typeId;

    /** Type name. */
    private final String typeName;

    /** Affinity key field name. */
    private final String affKeyFieldName;

    /** Empty constructor for OBJECT/BINARY instantiation; {@code null} when unsafe allocation is used. */
    private final Constructor<?> ctor;

    /** Field accessors; non-null only for {@code OBJECT} mode. */
    private final BinaryFieldAccessor[] fields;

    /** Write replacer. */
    private final BinaryWriteReplacer writeReplacer;

    /** Cached {@code readResolve()} method, if the class declares one. */
    private final Method readResolveMtd;

    /** Stable field metadata; non-null only for {@code OBJECT} mode with metadata enabled. */
    private final Map<String, BinaryFieldMetadata> stableFieldsMeta;

    /** Object schemas. Initialized only for serializable classes and contains only 1 entry. */
    private final BinarySchema stableSchema;

    /** Schema registry. */
    private final BinarySchemaRegistry schemaReg;

    /** Whether typeId was registered by MarshallerContext. */
    private final boolean registered;

    /** Whether OptimizedMarshaller must be used instead of the binary protocol. */
    private final boolean useOptMarshaller;

    /** Whether the class is excluded from marshalling altogether. */
    private final boolean excluded;

    /** Interfaces to pass when re-creating a dynamic proxy; non-null only for {@code PROXY} mode. */
    private final Class<?>[] intfs;

    /** Whether stable schema was published. */
    private volatile boolean stableSchemaPublished;

    /**
     * @param ctx Context.
     * @param cls Class.
     * @param userType User type flag.
     * @param typeId Type ID.
     * @param typeName Type name.
     * @param affKeyFieldName Affinity key field name.
     * @param mapper Mapper.
     * @param serializer Serializer.
     * @param metaDataEnabled Metadata enabled flag.
     * @param registered Whether typeId has been successfully registered by MarshallerContext or not.
     * @throws BinaryObjectException In case of error.
     */
    BinaryClassDescriptor(
        BinaryContext ctx,
        Class<?> cls,
        boolean userType,
        int typeId,
        String typeName,
        @Nullable String affKeyFieldName,
        @Nullable BinaryInternalMapper mapper,
        @Nullable BinarySerializer serializer,
        boolean metaDataEnabled,
        boolean registered
    ) throws BinaryObjectException {
        assert ctx != null;
        assert cls != null;
        assert mapper != null;

        initialSerializer = serializer;

        // If serializer is not defined at this point, then we have to use OptimizedMarshaller.
        useOptMarshaller = serializer == null || isGeometryClass(cls);

        // Reset reflective serializer so that we rely on existing reflection-based serialization.
        if (serializer instanceof BinaryReflectiveSerializer)
            serializer = null;

        this.ctx = ctx;
        this.cls = cls;
        this.typeId = typeId;
        this.userType = userType;
        this.typeName = typeName;
        this.affKeyFieldName = affKeyFieldName;
        this.serializer = serializer;
        this.mapper = mapper;
        this.registered = registered;

        schemaReg = ctx.schemaRegistry(typeId);

        excluded = MarshallerExclusions.isExcluded(cls);

        if (excluded)
            mode = BinaryWriteMode.EXCLUSION;
        else if (useOptMarshaller)
            mode = BinaryWriteMode.OPTIMIZED; // Will not be used anywhere.
        else {
            if (cls == BinaryEnumObjectImpl.class)
                mode = BinaryWriteMode.BINARY_ENUM;
            else
                mode = serializer != null ? BinaryWriteMode.BINARY : BinaryUtils.mode(cls);
        }

        // Warn the user that this user type will silently fall back to OptimizedMarshaller.
        if (useOptMarshaller && userType && !U.isIgnite(cls) && !U.isJdk(cls) && !QueryUtils.isGeometryClass(cls)) {
            U.warn(ctx.log(), "Class \"" + cls.getName() + "\" cannot be serialized using " +
                BinaryMarshaller.class.getSimpleName() + " because it either implements Externalizable interface " +
                "or have writeObject/readObject methods. " + OptimizedMarshaller.class.getSimpleName() + " will be " +
                "used instead and class instances will be deserialized on the server. Please ensure that all nodes " +
                "have this class in classpath. To enable binary serialization either implement " +
                Binarylizable.class.getSimpleName() + " interface or set explicit serializer using " +
                "BinaryTypeConfiguration.setSerializer() method.");
        }

        switch (mode) {
            // All self-describing modes need neither constructor, field accessors nor schema.
            case P_BYTE:
            case P_BOOLEAN:
            case P_SHORT:
            case P_CHAR:
            case P_INT:
            case P_LONG:
            case P_FLOAT:
            case P_DOUBLE:
            case BYTE:
            case SHORT:
            case INT:
            case LONG:
            case FLOAT:
            case DOUBLE:
            case CHAR:
            case BOOLEAN:
            case DECIMAL:
            case STRING:
            case UUID:
            case DATE:
            case TIMESTAMP:
            case TIME:
            case BYTE_ARR:
            case SHORT_ARR:
            case INT_ARR:
            case LONG_ARR:
            case FLOAT_ARR:
            case DOUBLE_ARR:
            case CHAR_ARR:
            case BOOLEAN_ARR:
            case DECIMAL_ARR:
            case STRING_ARR:
            case UUID_ARR:
            case DATE_ARR:
            case TIMESTAMP_ARR:
            case TIME_ARR:
            case OBJECT_ARR:
            case COL:
            case MAP:
            case BINARY_OBJ:
            case ENUM:
            case BINARY_ENUM:
            case ENUM_ARR:
            case CLASS:
            case OPTIMIZED:
            case EXCLUSION:
                ctor = null;
                fields = null;
                stableFieldsMeta = null;
                stableSchema = null;
                intfs = null;

                break;

            case PROXY:
                // Proxies are re-created from their interface list on read.
                ctor = null;
                fields = null;
                stableFieldsMeta = null;
                stableSchema = null;
                intfs = cls.getInterfaces();

                break;

            case BINARY:
                // Binarylizable/serializer-driven classes are instantiated via empty constructor.
                ctor = constructor(cls);
                fields = null;
                stableFieldsMeta = null;
                stableSchema = null;
                intfs = null;

                break;

            case OBJECT:
                // Must not use constructor to honor transient fields semantics.
                ctor = null;

                Map<Object, BinaryFieldAccessor> fields0;

                if (BinaryUtils.FIELDS_SORTED_ORDER) {
                    fields0 = new TreeMap<>();

                    stableFieldsMeta = metaDataEnabled ? new TreeMap<String, BinaryFieldMetadata>() : null;
                }
                else {
                    fields0 = new LinkedHashMap<>();

                    stableFieldsMeta = metaDataEnabled ? new LinkedHashMap<String, BinaryFieldMetadata>() : null;
                }

                Set<String> duplicates = duplicateFields(cls);

                Collection<String> names = new HashSet<>();
                Collection<Integer> ids = new HashSet<>();

                // Walk the hierarchy bottom-up, collecting every serializable field.
                for (Class<?> c = cls; c != null && !c.equals(Object.class); c = c.getSuperclass()) {
                    for (Field f : c.getDeclaredFields()) {
                        if (serializeField(f)) {
                            f.setAccessible(true);

                            String name = f.getName();

                            // Shadowed fields get class-qualified names to stay unique.
                            if (duplicates.contains(name))
                                name = BinaryUtils.qualifiedFieldName(c, name);

                            boolean added = names.add(name);

                            assert added : name;

                            int fieldId = this.mapper.fieldId(typeId, name);

                            if (!ids.add(fieldId))
                                throw new BinaryObjectException("Duplicate field ID: " + name);

                            BinaryFieldAccessor fieldInfo = BinaryFieldAccessor.create(f, fieldId);

                            fields0.put(name, fieldInfo);

                            if (metaDataEnabled)
                                stableFieldsMeta.put(name, new BinaryFieldMetadata(fieldInfo));
                        }
                    }
                }

                fields = fields0.values().toArray(new BinaryFieldAccessor[fields0.size()]);

                BinarySchema.Builder schemaBuilder = BinarySchema.Builder.newBuilder();

                for (BinaryFieldAccessor field : fields)
                    schemaBuilder.addField(field.id);

                stableSchema = schemaBuilder.build();

                intfs = null;

                break;

            default:
                // Should never happen.
                throw new BinaryObjectException("Invalid mode: " + mode);
        }

        BinaryWriteReplacer writeReplacer0 = BinaryUtils.writeReplacer(cls);

        Method writeReplaceMthd;

        if (mode == BinaryWriteMode.BINARY || mode == BinaryWriteMode.OBJECT) {
            readResolveMtd = U.findNonPublicMethod(cls, "readResolve");
            writeReplaceMthd = U.findNonPublicMethod(cls, "writeReplace");
        }
        else {
            readResolveMtd = null;
            writeReplaceMthd = null;
        }

        // Class-level replacer takes precedence over the writeReplace() method.
        if (writeReplaceMthd != null && writeReplacer0 == null)
            writeReplacer0 = new BinaryMethodWriteReplacer(writeReplaceMthd);

        writeReplacer = writeReplacer0;
    }

    /**
     * Find all fields with duplicate names in the class.
     *
     * @param cls Class.
     * @return Fields with duplicate names.
     */
    private static Set<String> duplicateFields(Class cls) {
        Set<String> all = new HashSet<>();
        Set<String> duplicates = new HashSet<>();

        for (Class<?> c = cls; c != null && !c.equals(Object.class); c = c.getSuperclass()) {
            for (Field f : c.getDeclaredFields()) {
                if (serializeField(f)) {
                    String name = f.getName();

                    if (!all.add(name))
                        duplicates.add(name);
                }
            }
        }

        return duplicates;
    }

    /**
     * Whether the field must be serialized.
     *
     * @param f Field.
     * @return {@code True} if must be serialized.
     */
    private static boolean serializeField(Field f) {
        int mod = f.getModifiers();

        return !Modifier.isStatic(mod) && !Modifier.isTransient(mod);
    }

    /**
     * @return {@code True} if enum.
     */
    boolean isEnum() {
        return mode == BinaryWriteMode.ENUM;
    }

    /**
     * @return Described class.
     */
    Class<?> describedClass() {
        return cls;
    }

    /**
     * @return Type ID.
     */
    public int typeId() {
        return typeId;
    }

    /**
     * @return Type name.
     */
    String typeName() {
        return typeName;
    }

    /**
     * @return Type mapper.
     */
    BinaryInternalMapper mapper() {
        return mapper;
    }

    /**
     * @return Serializer.
     */
    BinarySerializer serializer() {
        return serializer;
    }

    /**
     * @return Initial serializer that is passed during BinaryClassDescriptor construction.
     *         Can differ from {@link #serializer}.
     */
    BinarySerializer initialSerializer() {
        return initialSerializer;
    }

    /**
     * @return Affinity field key name.
     */
    String affFieldKeyName() {
        return affKeyFieldName;
    }

    /**
     * @return User type flag.
     */
    boolean userType() {
        return userType;
    }

    /**
     * @return Fields meta data.
     */
    Map<String, BinaryFieldMetadata> fieldsMeta() {
        return stableFieldsMeta;
    }

    /**
     * @return Schema.
     */
    BinarySchema schema() {
        return stableSchema;
    }

    /**
     * @return Whether typeId has been successfully registered by MarshallerContext or not.
     */
    public boolean registered() {
        return registered;
    }

    /**
     * @return {@code true} if {@link OptimizedMarshaller} must be used instead of {@link BinaryMarshaller}
     *         for object serialization and deserialization.
     */
    public boolean useOptimizedMarshaller() {
        return useOptMarshaller;
    }

    /**
     * Checks whether the class values are explicitly excluded from marshalling.
     *
     * @return {@code true} if excluded, {@code false} otherwise.
     */
    public boolean excluded() {
        return excluded;
    }

    /**
     * @return {@code True} if write-replace should be performed for class.
     */
    public boolean isWriteReplace() {
        return writeReplacer != null;
    }

    /**
     * Perform write replace.
     *
     * @param obj Original object.
     * @return Replaced object.
     */
    public Object writeReplace(Object obj) {
        assert isWriteReplace();

        return writeReplacer.replace(obj);
    }

    /**
     * @return binaryReadResolve() method
     */
    @SuppressWarnings("UnusedDeclaration")
    @Nullable Method getReadResolveMethod() {
        return readResolveMtd;
    }

    /**
     * Writes the object to the given writer, dispatching on the precomputed mode.
     *
     * @param obj Object.
     * @param writer Writer.
     * @throws BinaryObjectException In case of error.
     */
    void write(Object obj, BinaryWriterExImpl writer) throws BinaryObjectException {
        assert obj != null;
        assert writer != null;
        assert mode != BinaryWriteMode.OPTIMIZED : "OptimizedMarshaller should not be used here: " + cls.getName();

        writer.typeId(typeId);

        switch (mode) {
            case P_BYTE:
            case BYTE:
                writer.writeByteFieldPrimitive((byte) obj);

                break;

            case P_SHORT:
            case SHORT:
                writer.writeShortFieldPrimitive((short)obj);

                break;

            case P_INT:
            case INT:
                writer.writeIntFieldPrimitive((int) obj);

                break;

            case P_LONG:
            case LONG:
                writer.writeLongFieldPrimitive((long) obj);

                break;

            case P_FLOAT:
            case FLOAT:
                writer.writeFloatFieldPrimitive((float) obj);

                break;

            case P_DOUBLE:
            case DOUBLE:
                writer.writeDoubleFieldPrimitive((double) obj);

                break;

            case P_CHAR:
            case CHAR:
                writer.writeCharFieldPrimitive((char) obj);

                break;

            case P_BOOLEAN:
            case BOOLEAN:
                writer.writeBooleanFieldPrimitive((boolean) obj);

                break;

            case DECIMAL:
                writer.doWriteDecimal((BigDecimal)obj);

                break;

            case STRING:
                writer.doWriteString((String)obj);

                break;

            case UUID:
                writer.doWriteUuid((UUID)obj);

                break;

            case DATE:
                writer.doWriteDate((Date)obj);

                break;

            case TIMESTAMP:
                writer.doWriteTimestamp((Timestamp)obj);

                break;

            case TIME:
                writer.doWriteTime((Time)obj);

                break;

            case BYTE_ARR:
                writer.doWriteByteArray((byte[])obj);

                break;

            case SHORT_ARR:
                writer.doWriteShortArray((short[]) obj);

                break;

            case INT_ARR:
                writer.doWriteIntArray((int[]) obj);

                break;

            case LONG_ARR:
                writer.doWriteLongArray((long[]) obj);

                break;

            case FLOAT_ARR:
                writer.doWriteFloatArray((float[]) obj);

                break;

            case DOUBLE_ARR:
                writer.doWriteDoubleArray((double[]) obj);

                break;

            case CHAR_ARR:
                writer.doWriteCharArray((char[]) obj);

                break;

            case BOOLEAN_ARR:
                writer.doWriteBooleanArray((boolean[]) obj);

                break;

            case DECIMAL_ARR:
                writer.doWriteDecimalArray((BigDecimal[]) obj);

                break;

            case STRING_ARR:
                writer.doWriteStringArray((String[]) obj);

                break;

            case UUID_ARR:
                writer.doWriteUuidArray((UUID[]) obj);

                break;

            case DATE_ARR:
                writer.doWriteDateArray((Date[]) obj);

                break;

            case TIMESTAMP_ARR:
                writer.doWriteTimestampArray((Timestamp[]) obj);

                break;

            case TIME_ARR:
                writer.doWriteTimeArray((Time[]) obj);

                break;

            case OBJECT_ARR:
                writer.doWriteObjectArray((Object[])obj);

                break;

            case COL:
                writer.doWriteCollection((Collection<?>)obj);

                break;

            case MAP:
                writer.doWriteMap((Map<?, ?>)obj);

                break;

            case ENUM:
                writer.doWriteEnum((Enum<?>)obj);

                break;

            case BINARY_ENUM:
                writer.doWriteBinaryEnum((BinaryEnumObjectImpl)obj);

                break;

            case ENUM_ARR:
                writer.doWriteEnumArray((Object[])obj);

                break;

            case CLASS:
                writer.doWriteClass((Class)obj);

                break;

            case PROXY:
                writer.doWriteProxy((Proxy)obj, intfs);

                break;

            case BINARY_OBJ:
                writer.doWriteBinaryObject((BinaryObjectImpl)obj);

                break;

            case BINARY:
                if (preWrite(writer, obj)) {
                    try {
                        if (serializer != null)
                            serializer.writeBinary(obj, writer);
                        else
                            ((Binarylizable)obj).writeBinary(writer);

                        postWrite(writer);

                        // Check whether we need to update metadata.
                        if (obj.getClass() != BinaryMetadata.class) {
                            int schemaId = writer.schemaId();

                            if (schemaReg.schema(schemaId) == null) {
                                // This is new schema, let's update metadata.
                                // Serialize once more through a collector to capture field metadata.
                                BinaryMetadataCollector collector =
                                    new BinaryMetadataCollector(typeId, typeName, mapper);

                                if (serializer != null)
                                    serializer.writeBinary(obj, collector);
                                else
                                    ((Binarylizable)obj).writeBinary(collector);

                                BinarySchema newSchema = collector.schema();

                                BinaryMetadata meta = new BinaryMetadata(typeId, typeName, collector.meta(),
                                    affKeyFieldName, Collections.singleton(newSchema), false);

                                ctx.updateMetadata(typeId, meta);

                                schemaReg.addSchema(newSchema.schemaId(), newSchema);
                            }
                        }

                        postWriteHashCode(writer, obj);
                    }
                    finally {
                        writer.popSchema();
                    }
                }

                break;

            case OBJECT:
                // Publish the stable schema once per descriptor before the first write.
                if (userType && !stableSchemaPublished) {
                    // Update meta before write object with new schema
                    BinaryMetadata meta = new BinaryMetadata(typeId, typeName, stableFieldsMeta, affKeyFieldName,
                        Collections.singleton(stableSchema), false);

                    ctx.updateMetadata(typeId, meta);

                    schemaReg.addSchema(stableSchema.schemaId(), stableSchema);

                    stableSchemaPublished = true;
                }

                if (preWrite(writer, obj)) {
                    try {
                        for (BinaryFieldAccessor info : fields)
                            info.write(obj, writer);

                        writer.schemaId(stableSchema.schemaId());

                        postWrite(writer);

                        postWriteHashCode(writer, obj);
                    }
                    finally {
                        writer.popSchema();
                    }
                }

                break;

            default:
                assert false : "Invalid mode: " + mode;
        }
    }

    /**
     * Reads an object of the described class, applying readResolve() if present.
     *
     * @param reader Reader.
     * @return Object.
     * @throws BinaryObjectException If failed.
     */
    Object read(BinaryReaderExImpl reader) throws BinaryObjectException {
        try {
            assert reader != null;
            assert mode != BinaryWriteMode.OPTIMIZED : "OptimizedMarshaller should not be used here: " + cls.getName();

            Object res;

            switch (mode) {
                case BINARY:
                    res = newInstance();

                    // Register the handle before reading so cyclic references resolve.
                    reader.setHandle(res);

                    if (serializer != null)
                        serializer.readBinary(res, reader);
                    else
                        ((Binarylizable)res).readBinary(reader);

                    break;

                case OBJECT:
                    res = newInstance();

                    reader.setHandle(res);

                    for (BinaryFieldAccessor info : fields)
                        info.read(res, reader);

                    break;

                default:
                    assert false : "Invalid mode: " + mode;

                    return null;
            }

            if (readResolveMtd != null) {
                try {
                    res = readResolveMtd.invoke(res);

                    reader.setHandle(res);
                }
                catch (IllegalAccessException e) {
                    throw new RuntimeException(e);
                }
                catch (InvocationTargetException e) {
                    if (e.getTargetException() instanceof BinaryObjectException)
                        throw (BinaryObjectException)e.getTargetException();

                    throw new BinaryObjectException("Failed to execute readResolve() method on " + res, e);
                }
            }

            return res;
        }
        catch (Exception e) {
            // Hide the type name from the message when sensitive data must not leak.
            if (S.INCLUDE_SENSITIVE && !F.isEmpty(typeName))
                throw new BinaryObjectException("Failed to deserialize object [typeName=" + typeName + ']', e);
            else
                throw new BinaryObjectException("Failed to deserialize object [typeId=" + typeId + ']', e);
        }
    }

    /**
     * Pre-write phase.
     *
     * @param writer Writer.
     * @param obj Object.
     * @return Whether further write is needed.
     */
    private boolean preWrite(BinaryWriterExImpl writer, Object obj) {
        if (writer.tryWriteAsHandle(obj))
            return false;

        writer.preWrite(registered ? null : cls.getName());

        return true;
    }

    /**
     * Post-write phase.
     *
     * @param writer Writer.
     */
    private void postWrite(BinaryWriterExImpl writer) {
        writer.postWrite(userType, registered);
    }

    /**
     * Post-write routine for hash code.
     *
     * @param writer Writer.
     * @param obj Object.
     */
    private void postWriteHashCode(BinaryWriterExImpl writer, Object obj) {
        // No need to call "postWriteHashCode" here because we do not care about hash code.
        if (!(obj instanceof CacheObjectImpl))
            writer.postWriteHashCode(registered ? null : cls.getName());
    }

    /**
     * @return Instance.
     * @throws BinaryObjectException In case of error.
     */
    private Object newInstance() throws BinaryObjectException {
        try {
            // Fall back to unsafe allocation when no empty constructor was resolved.
            return ctor != null ? ctor.newInstance() : GridUnsafe.allocateInstance(cls);
        }
        catch (InstantiationException | InvocationTargetException | IllegalAccessException e) {
            throw new BinaryObjectException("Failed to instantiate instance: " + cls, e);
        }
    }

    /**
     * @param cls Class.
     * @return Constructor.
     * @throws BinaryObjectException If constructor doesn't exist.
     */
    @SuppressWarnings("ConstantConditions")
    @Nullable private static Constructor<?> constructor(Class<?> cls) throws BinaryObjectException {
        assert cls != null;

        try {
            Constructor<?> ctor = U.forceEmptyConstructor(cls);

            if (ctor == null)
                throw new BinaryObjectException("Failed to find empty constructor for class: " + cls.getName());

            ctor.setAccessible(true);

            return ctor;
        }
        catch (IgniteCheckedException e) {
            throw new BinaryObjectException("Failed to get constructor for class: " + cls.getName(), e);
        }
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(BinaryClassDescriptor.class, this);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.metrics2.lib; import org.junit.Test; import static org.junit.Assert.*; import static org.mockito.Mockito.*; import static org.apache.hadoop.test.MockitoMaker.*; import org.apache.hadoop.metrics2.MetricsCollector; import org.apache.hadoop.metrics2.MetricsException; import org.apache.hadoop.metrics2.MetricsRecordBuilder; import org.apache.hadoop.metrics2.MetricsSource; import org.apache.hadoop.metrics2.annotation.Metric; import org.apache.hadoop.metrics2.annotation.Metric.*; import org.apache.hadoop.metrics2.annotation.Metrics; import org.apache.hadoop.metrics2.impl.MsInfo; import static org.apache.hadoop.metrics2.lib.Interns.*; import static org.apache.hadoop.test.MetricsAsserts.*; public class TestMetricsAnnotations { static class MyMetrics { @Metric MutableCounterInt c1; @Metric({"Counter2", "Counter2 desc"}) MutableCounterLong c2; @Metric MutableGaugeInt g1, g2; @Metric("g3 desc") MutableGaugeLong g3; @Metric MutableRate r1; @Metric MutableStat s1; @Metric MutableRates rs1; } @Test public void testFields() { MyMetrics metrics = new MyMetrics(); MetricsSource source = MetricsAnnotations.makeSource(metrics); metrics.c1.incr(); metrics.c2.incr(); metrics.g1.incr(); 
metrics.g2.incr(); metrics.g3.incr(); metrics.r1.add(1); metrics.s1.add(1); metrics.rs1.add("rs1", 1); MetricsRecordBuilder rb = getMetrics(source); verify(rb).addCounter(info("C1", "C1"), 1); verify(rb).addCounter(info("Counter2", "Counter2 desc"), 1L); verify(rb).addGauge(info("G1", "G1"), 1); verify(rb).addGauge(info("G2", "G2"), 1); verify(rb).addGauge(info("G3", "g3 desc"), 1L); verify(rb).addCounter(info("R1NumOps", "Number of ops for r1"), 1L); verify(rb).addGauge(info("R1AvgTime", "Average time for r1"), 1.0); verify(rb).addCounter(info("S1NumOps", "Number of ops for s1"), 1L); verify(rb).addGauge(info("S1AvgTime", "Average time for s1"), 1.0); verify(rb).addCounter(info("Rs1NumOps", "Number of ops for rs1"), 1L); verify(rb).addGauge(info("Rs1AvgTime", "Average time for rs1"), 1.0); } static class BadMetrics { @Metric Integer i0; } @Test(expected=MetricsException.class) public void testBadFields() { MetricsAnnotations.makeSource(new BadMetrics()); } static class MyMetrics2 { @Metric int getG1() { return 1; } @Metric long getG2() { return 2; } @Metric float getG3() { return 3; } @Metric double getG4() { return 4; } @Metric(type=Type.COUNTER) int getC1() { return 1; } @Metric(type=Type.COUNTER) long getC2() { return 2; } @Metric(type=Type.TAG) String getT1() { return "t1"; } } @Test public void testMethods() { MyMetrics2 metrics = new MyMetrics2(); MetricsSource source = MetricsAnnotations.makeSource(metrics); MetricsRecordBuilder rb = getMetrics(source); verify(rb).addGauge(info("G1", "G1"), 1); verify(rb).addGauge(info("G2", "G2"), 2L); verify(rb).addGauge(info("G3", "G3"), 3.0f); verify(rb).addGauge(info("G4", "G4"), 4.0); verify(rb).addCounter(info("C1", "C1"), 1); verify(rb).addCounter(info("C2", "C2"), 2L); verify(rb).tag(info("T1", "T1"), "t1"); } static class BadMetrics2 { @Metric int foo(int i) { return i; } } @Test(expected=IllegalArgumentException.class) public void testBadMethodWithArgs() { MetricsAnnotations.makeSource(new BadMetrics2()); } 
static class BadMetrics3 { @Metric boolean foo() { return true; } } @Test(expected=MetricsException.class) public void testBadMethodReturnType() { MetricsAnnotations.makeSource(new BadMetrics3()); } @Metrics(about="My metrics", context="foo") static class MyMetrics3 { @Metric int getG1() { return 1; } } @Test public void testClasses() { MetricsRecordBuilder rb = getMetrics( MetricsAnnotations.makeSource(new MyMetrics3())); MetricsCollector collector = rb.parent(); verify(collector).addRecord(info("MyMetrics3", "My metrics")); verify(rb).add(tag(MsInfo.Context, "foo")); } static class HybridMetrics implements MetricsSource { final MetricsRegistry registry = new MetricsRegistry("HybridMetrics") .setContext("hybrid"); @Metric("C0 desc") MutableCounterInt C0; @Metric int getG0() { return 0; } public void getMetrics(MetricsCollector collector, boolean all) { collector.addRecord("foo") .setContext("foocontext") .addCounter(info("C1", "C1 desc"), 1) .endRecord() .addRecord("bar") .setContext("barcontext") .addGauge(info("G1", "G1 desc"), 1); registry.snapshot(collector.addRecord(registry.info()), all); } } @Test public void testHybrid() { HybridMetrics metrics = new HybridMetrics(); MetricsSource source = MetricsAnnotations.makeSource(metrics); assertSame(metrics, source); metrics.C0.incr(); MetricsRecordBuilder rb = getMetrics(source); MetricsCollector collector = rb.parent(); verify(collector).addRecord("foo"); verify(collector).addRecord("bar"); verify(collector).addRecord(info("HybridMetrics", "HybridMetrics")); verify(rb).setContext("foocontext"); verify(rb).addCounter(info("C1", "C1 desc"), 1); verify(rb).setContext("barcontext"); verify(rb).addGauge(info("G1", "G1 desc"), 1); verify(rb).add(tag(MsInfo.Context, "hybrid")); verify(rb).addCounter(info("C0", "C0 desc"), 1); verify(rb).addGauge(info("G0", "G0"), 0); } @Metrics(context="hybrid") static class BadHybridMetrics implements MetricsSource { @Metric MutableCounterInt c1; public void getMetrics(MetricsCollector 
collector, boolean all) { collector.addRecord("foo"); } } @Test(expected=MetricsException.class) public void testBadHybrid() { MetricsAnnotations.makeSource(new BadHybridMetrics()); } static class EmptyMetrics { int foo; } @Test(expected=MetricsException.class) public void testEmptyMetrics() { MetricsAnnotations.makeSource(new EmptyMetrics()); } }
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2017 by Hitachi Vantara : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.core.logging; import java.util.ArrayList; import java.util.List; import org.pentaho.di.core.Const; import org.pentaho.di.core.KettleClientEnvironment; import org.pentaho.di.core.Result; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowMeta; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaBase; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.job.Job; import org.pentaho.di.repository.RepositoryAttributeInterface; import org.pentaho.di.trans.HasDatabasesInterface; import org.pentaho.di.trans.step.StepMeta; import org.w3c.dom.Node; /** * This class describes a job logging table * * @author matt * */ public class JobLogTable extends BaseLogTable implements Cloneable, LogTableInterface { private static Class<?> PKG = JobLogTable.class; // for 
i18n purposes, needed by Translator2!! public static final String XML_TAG = "job-log-table"; public enum ID { ID_JOB( "ID_JOB" ), CHANNEL_ID( "CHANNEL_ID" ), JOBNAME( "JOBNAME" ), STATUS( "STATUS" ), LINES_READ( "LINES_READ" ), LINES_WRITTEN( "LINES_WRITTEN" ), LINES_UPDATED( "LINES_UPDATED" ), LINES_INPUT( "LINES_INPUT" ), LINES_OUTPUT( "LINES_OUTPUT" ), LINES_REJECTED( "LINES_REJECTED" ), ERRORS( "ERRORS" ), STARTDATE( "STARTDATE" ), ENDDATE( "ENDDATE" ), LOGDATE( "LOGDATE" ), DEPDATE( "DEPDATE" ), REPLAYDATE( "REPLAYDATE" ), LOG_FIELD( "LOG_FIELD" ), EXECUTING_SERVER( "EXECUTING_SERVER" ), EXECUTING_USER( "EXECUTING_USER" ), START_JOB_ENTRY( "START_JOB_ENTRY" ), CLIENT( "CLIENT" ); private String id; private ID( String id ) { this.id = id; } public String toString() { return id; } } private String logInterval; private String logSizeLimit; private JobLogTable( VariableSpace space, HasDatabasesInterface databasesInterface ) { super( space, databasesInterface, null, null, null ); } @Override public Object clone() { try { JobLogTable table = (JobLogTable) super.clone(); table.fields = new ArrayList<LogTableField>(); for ( LogTableField field : this.fields ) { table.fields.add( (LogTableField) field.clone() ); } return table; } catch ( CloneNotSupportedException e ) { return null; } } public String getXML() { StringBuilder retval = new StringBuilder(); retval.append( " " ).append( XMLHandler.openTag( XML_TAG ) ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "connection", connectionName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "schema", schemaName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "table", tableName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "size_limit_lines", logSizeLimit ) ); retval.append( " " ).append( XMLHandler.addTagValue( "interval", logInterval ) ); retval.append( " " ).append( XMLHandler.addTagValue( "timeout_days", timeoutInDays ) ); retval.append( super.getFieldsXML() ); 
retval.append( " " ).append( XMLHandler.closeTag( XML_TAG ) ).append( Const.CR ); return retval.toString(); } public void loadXML( Node node, List<DatabaseMeta> databases, List<StepMeta> steps ) { connectionName = XMLHandler.getTagValue( node, "connection" ); schemaName = XMLHandler.getTagValue( node, "schema" ); tableName = XMLHandler.getTagValue( node, "table" ); logSizeLimit = XMLHandler.getTagValue( node, "size_limit_lines" ); logInterval = XMLHandler.getTagValue( node, "interval" ); timeoutInDays = XMLHandler.getTagValue( node, "timeout_days" ); super.loadFieldsXML( node ); } public void saveToRepository( RepositoryAttributeInterface attributeInterface ) throws KettleException { super.saveToRepository( attributeInterface ); // Also save the log interval and log size limit // attributeInterface.setAttribute( getLogTableCode() + PROP_LOG_TABLE_INTERVAL, logInterval ); attributeInterface.setAttribute( getLogTableCode() + PROP_LOG_TABLE_SIZE_LIMIT, logSizeLimit ); } public void loadFromRepository( RepositoryAttributeInterface attributeInterface ) throws KettleException { super.loadFromRepository( attributeInterface ); logInterval = attributeInterface.getAttributeString( getLogTableCode() + PROP_LOG_TABLE_INTERVAL ); logSizeLimit = attributeInterface.getAttributeString( getLogTableCode() + PROP_LOG_TABLE_SIZE_LIMIT ); } @Override public void replaceMeta( LogTableCoreInterface logTableInterface ) { if ( !( logTableInterface instanceof JobLogTable ) ) { return; } JobLogTable logTable = (JobLogTable) logTableInterface; super.replaceMeta( logTable ); logInterval = logTable.logInterval; logSizeLimit = logTable.logSizeLimit; } //CHECKSTYLE:LineLength:OFF public static JobLogTable getDefault( VariableSpace space, HasDatabasesInterface databasesInterface ) { JobLogTable table = new JobLogTable( space, databasesInterface ); table.fields.add( new LogTableField( ID.ID_JOB.id, true, false, "ID_JOB", BaseMessages.getString( PKG, "JobLogTable.FieldName.BatchID" ), 
BaseMessages.getString( PKG, "JobLogTable.FieldDescription.BatchID" ), ValueMetaInterface.TYPE_INTEGER, 8 ) ); table.fields.add( new LogTableField( ID.CHANNEL_ID.id, true, false, "CHANNEL_ID", BaseMessages.getString( PKG, "JobLogTable.FieldName.ChannelID" ), BaseMessages.getString( PKG, "JobLogTable.FieldDescription.ChannelID" ), ValueMetaInterface.TYPE_STRING, 255 ) ); table.fields.add( new LogTableField( ID.JOBNAME.id, true, false, "JOBNAME", BaseMessages.getString( PKG, "JobLogTable.FieldName.JobName" ), BaseMessages.getString( PKG, "JobLogTable.FieldDescription.JobName" ), ValueMetaInterface.TYPE_STRING, 255 ) ); table.fields.add( new LogTableField( ID.STATUS.id, true, false, "STATUS", BaseMessages.getString( PKG, "JobLogTable.FieldName.Status" ), BaseMessages.getString( PKG, "JobLogTable.FieldDescription.Status" ), ValueMetaInterface.TYPE_STRING, 15 ) ); table.fields.add( new LogTableField( ID.LINES_READ.id, true, false, "LINES_READ", BaseMessages.getString( PKG, "JobLogTable.FieldName.LinesRead" ), BaseMessages.getString( PKG, "JobLogTable.FieldDescription.LinesRead" ), ValueMetaInterface.TYPE_INTEGER, 18 ) ); table.fields.add( new LogTableField( ID.LINES_WRITTEN.id, true, false, "LINES_WRITTEN", BaseMessages.getString( PKG, "JobLogTable.FieldName.LinesWritten" ), BaseMessages.getString( PKG, "JobLogTable.FieldDescription.LinesWritten" ), ValueMetaInterface.TYPE_INTEGER, 18 ) ); table.fields.add( new LogTableField( ID.LINES_UPDATED.id, true, false, "LINES_UPDATED", BaseMessages.getString( PKG, "JobLogTable.FieldName.LinesUpdated" ), BaseMessages.getString( PKG, "JobLogTable.FieldDescription.LinesUpdated" ), ValueMetaInterface.TYPE_INTEGER, 18 ) ); table.fields.add( new LogTableField( ID.LINES_INPUT.id, true, false, "LINES_INPUT", BaseMessages.getString( PKG, "JobLogTable.FieldName.LinesInput" ), BaseMessages.getString( PKG, "JobLogTable.FieldDescription.LinesInput" ), ValueMetaInterface.TYPE_INTEGER, 18 ) ); table.fields.add( new LogTableField( 
ID.LINES_OUTPUT.id, true, false, "LINES_OUTPUT", BaseMessages.getString( PKG, "JobLogTable.FieldName.LinesOutput" ), BaseMessages.getString( PKG, "JobLogTable.FieldDescription.LinesOutput" ), ValueMetaInterface.TYPE_INTEGER, 18 ) ); table.fields.add( new LogTableField( ID.LINES_REJECTED.id, true, false, "LINES_REJECTED", BaseMessages.getString( PKG, "JobLogTable.FieldName.LinesRejected" ), BaseMessages.getString( PKG, "JobLogTable.FieldDescription.LinesRejected" ), ValueMetaInterface.TYPE_INTEGER, 18 ) ); table.fields.add( new LogTableField( ID.ERRORS.id, true, false, "ERRORS", BaseMessages.getString( PKG, "JobLogTable.FieldName.Errors" ), BaseMessages.getString( PKG, "JobLogTable.FieldDescription.Errors" ), ValueMetaInterface.TYPE_INTEGER, 18 ) ); table.fields.add( new LogTableField( ID.STARTDATE.id, true, false, "STARTDATE", BaseMessages.getString( PKG, "JobLogTable.FieldName.StartDateRange" ), BaseMessages.getString( PKG, "JobLogTable.FieldDescription.StartDateRange" ), ValueMetaInterface.TYPE_DATE, -1 ) ); table.fields.add( new LogTableField( ID.ENDDATE.id, true, false, "ENDDATE", BaseMessages.getString( PKG, "JobLogTable.FieldName.EndDateRange" ), BaseMessages.getString( PKG, "JobLogTable.FieldDescription.EndDateRange" ), ValueMetaInterface.TYPE_DATE, -1 ) ); table.fields.add( new LogTableField( ID.LOGDATE.id, true, false, "LOGDATE", BaseMessages.getString( PKG, "JobLogTable.FieldName.LogDate" ), BaseMessages.getString( PKG, "JobLogTable.FieldDescription.LogDate" ), ValueMetaInterface.TYPE_DATE, -1 ) ); table.fields.add( new LogTableField( ID.DEPDATE.id, true, false, "DEPDATE", BaseMessages.getString( PKG, "JobLogTable.FieldName.DepDate" ), BaseMessages.getString( PKG, "JobLogTable.FieldDescription.DepDate" ), ValueMetaInterface.TYPE_DATE, -1 ) ); table.fields.add( new LogTableField( ID.REPLAYDATE.id, true, false, "REPLAYDATE", BaseMessages.getString( PKG, "JobLogTable.FieldName.ReplayDate" ), BaseMessages.getString( PKG, 
"JobLogTable.FieldDescription.ReplayDate" ), ValueMetaInterface.TYPE_DATE, -1 ) ); table.fields.add( new LogTableField( ID.LOG_FIELD.id, true, false, "LOG_FIELD", BaseMessages.getString( PKG, "JobLogTable.FieldName.LogField" ), BaseMessages.getString( PKG, "JobLogTable.FieldDescription.LogField" ), ValueMetaInterface.TYPE_STRING, DatabaseMeta.CLOB_LENGTH ) ); table.fields.add( new LogTableField( ID.EXECUTING_SERVER.id, false, false, "EXECUTING_SERVER", BaseMessages.getString( PKG, "JobLogTable.FieldName.ExecutingServer" ), BaseMessages.getString( PKG, "JobLogTable.FieldDescription.ExecutingServer" ), ValueMetaInterface.TYPE_STRING, 255 ) ); table.fields.add( new LogTableField( ID.EXECUTING_USER.id, false, false, "EXECUTING_USER", BaseMessages.getString( PKG, "JobLogTable.FieldName.ExecutingUser" ), BaseMessages.getString( PKG, "JobLogTable.FieldDescription.ExecutingUser" ), ValueMetaInterface.TYPE_STRING, 255 ) ); table.fields.add( new LogTableField( ID.START_JOB_ENTRY.id, false, false, "START_JOB_ENTRY", BaseMessages.getString( PKG, "JobLogTable.FieldName.StartingJobEntry" ), BaseMessages.getString( PKG, "JobLogTable.FieldDescription.StartingJobEntry" ), ValueMetaInterface.TYPE_STRING, 255 ) ); table.fields.add( new LogTableField( ID.CLIENT.id, false, false, "CLIENT", BaseMessages.getString( PKG, "JobLogTable.FieldName.Client" ), BaseMessages.getString( PKG, "JobLogTable.FieldDescription.Client" ), ValueMetaInterface.TYPE_STRING, 255 ) ); table.findField( ID.ID_JOB ).setKey( true ); table.findField( ID.LOGDATE ).setLogDateField( true ); table.findField( ID.LOG_FIELD ).setLogField( true ); table.findField( ID.CHANNEL_ID ).setVisible( false ); table.findField( ID.JOBNAME ).setVisible( false ); table.findField( ID.STATUS ).setStatusField( true ); table.findField( ID.ERRORS ).setErrorsField( true ); table.findField( ID.JOBNAME ).setNameField( true ); return table; } public LogTableField findField( ID id ) { return super.findField( id.id ); } public Object getSubject( 
ID id ) { return super.getSubject( id.id ); } public String getSubjectString( ID id ) { return super.getSubjectString( id.id ); } public void setBatchIdUsed( boolean use ) { findField( ID.ID_JOB ).setEnabled( use ); } public boolean isBatchIdUsed() { return findField( ID.ID_JOB ).isEnabled(); } public void setLogFieldUsed( boolean use ) { findField( ID.LOG_FIELD ).setEnabled( use ); } public boolean isLogFieldUsed() { return findField( ID.LOG_FIELD ).isEnabled(); } public String getStepnameRead() { return getSubjectString( ID.LINES_READ ); } public String getStepnameWritten() { return getSubjectString( ID.LINES_WRITTEN ); } public String getStepnameInput() { return getSubjectString( ID.LINES_INPUT ); } public String getStepnameOutput() { return getSubjectString( ID.LINES_OUTPUT ); } public String getStepnameUpdated() { return getSubjectString( ID.LINES_UPDATED ); } public String getStepnameRejected() { return getSubjectString( ID.LINES_REJECTED ); } /** * Sets the logging interval in seconds. Disabled if the logging interval is <=0. * * @param logInterval * The log interval value. A value higher than 0 means that the log table is updated every 'logInterval' * seconds. */ public void setLogInterval( String logInterval ) { this.logInterval = logInterval; } /** * Get the logging interval in seconds. Disabled if the logging interval is <=0. A value higher than 0 means that the * log table is updated every 'logInterval' seconds. 
* * @param logInterval * The log interval, */ public String getLogInterval() { return logInterval; } /** * @return the logSizeLimit */ public String getLogSizeLimit() { return logSizeLimit; } /** * @param logSizeLimit * the logSizeLimit to set */ public void setLogSizeLimit( String logSizeLimit ) { this.logSizeLimit = logSizeLimit; } /** * This method calculates all the values that are required * * @param id * the id to use or -1 if no id is needed * @param status * the log status to use */ public RowMetaAndData getLogRecord( LogStatus status, Object subject, Object parent ) { if ( subject == null || subject instanceof Job ) { Job job = (Job) subject; Result result = null; if ( job != null ) { result = job.getResult(); } RowMetaAndData row = new RowMetaAndData(); for ( LogTableField field : fields ) { if ( field.isEnabled() ) { Object value = null; if ( job != null ) { switch ( ID.valueOf( field.getId() ) ) { case ID_JOB: value = new Long( job.getBatchId() ); break; case CHANNEL_ID: value = job.getLogChannelId(); break; case JOBNAME: value = job.getJobname(); break; case STATUS: value = status.getStatus(); break; case LINES_READ: value = result == null ? null : new Long( result.getNrLinesRead() ); break; case LINES_WRITTEN: value = result == null ? null : new Long( result.getNrLinesWritten() ); break; case LINES_INPUT: value = result == null ? null : new Long( result.getNrLinesInput() ); break; case LINES_OUTPUT: value = result == null ? null : new Long( result.getNrLinesOutput() ); break; case LINES_UPDATED: value = result == null ? null : new Long( result.getNrLinesUpdated() ); break; case LINES_REJECTED: value = result == null ? null : new Long( result.getNrLinesRejected() ); break; case ERRORS: value = result == null ? 
null : new Long( result.getNrErrors() ); break; case STARTDATE: value = job.getStartDate(); break; case LOGDATE: value = job.getLogDate(); break; case ENDDATE: value = job.getEndDate(); break; case DEPDATE: value = job.getDepDate(); break; case REPLAYDATE: value = job.getCurrentDate(); break; case LOG_FIELD: value = getLogBuffer( job, job.getLogChannelId(), status, logSizeLimit ); break; case EXECUTING_SERVER: value = job.getExecutingServer(); break; case EXECUTING_USER: value = job.getExecutingUser(); break; case START_JOB_ENTRY: value = job.getStartJobEntryCopy() != null ? job.getStartJobEntryCopy().getName() : null; break; case CLIENT: value = KettleClientEnvironment.getInstance().getClient() != null ? KettleClientEnvironment .getInstance().getClient().toString() : "unknown"; break; default: break; } } row.addValue( field.getFieldName(), field.getDataType(), value ); row.getRowMeta().getValueMeta( row.size() - 1 ).setLength( field.getLength() ); } } return row; } else { return null; } } public String getLogTableCode() { return "JOB"; } public String getLogTableType() { return BaseMessages.getString( PKG, "JobLogTable.Type.Description" ); } public String getConnectionNameVariable() { return Const.KETTLE_JOB_LOG_DB; } public String getSchemaNameVariable() { return Const.KETTLE_JOB_LOG_SCHEMA; } public String getTableNameVariable() { return Const.KETTLE_JOB_LOG_TABLE; } public List<RowMetaInterface> getRecommendedIndexes() { List<RowMetaInterface> indexes = new ArrayList<RowMetaInterface>(); // First index : ID_JOB if any is used. 
// if ( isBatchIdUsed() ) { RowMetaInterface batchIndex = new RowMeta(); LogTableField keyField = getKeyField(); ValueMetaInterface keyMeta = new ValueMetaBase( keyField.getFieldName(), keyField.getDataType() ); keyMeta.setLength( keyField.getLength() ); batchIndex.addValueMeta( keyMeta ); indexes.add( batchIndex ); } // The next index includes : ERRORS, STATUS, JOBNAME: RowMetaInterface lookupIndex = new RowMeta(); LogTableField errorsField = findField( ID.ERRORS ); if ( errorsField != null ) { ValueMetaInterface valueMeta = new ValueMetaBase( errorsField.getFieldName(), errorsField.getDataType() ); valueMeta.setLength( errorsField.getLength() ); lookupIndex.addValueMeta( valueMeta ); } LogTableField statusField = findField( ID.STATUS ); if ( statusField != null ) { ValueMetaInterface valueMeta = new ValueMetaBase( statusField.getFieldName(), statusField.getDataType() ); valueMeta.setLength( statusField.getLength() ); lookupIndex.addValueMeta( valueMeta ); } LogTableField transNameField = findField( ID.JOBNAME ); if ( transNameField != null ) { ValueMetaInterface valueMeta = new ValueMetaBase( transNameField.getFieldName(), transNameField.getDataType() ); valueMeta.setLength( transNameField.getLength() ); lookupIndex.addValueMeta( valueMeta ); } indexes.add( lookupIndex ); return indexes; } @Override public void setAllGlobalParametersToNull() { boolean clearGlobalVariables = Boolean.valueOf( System.getProperties().getProperty( Const.KETTLE_GLOBAL_LOG_VARIABLES_CLEAR_ON_EXPORT, "false" ) ); if ( clearGlobalVariables ) { super.setAllGlobalParametersToNull(); logInterval = isGlobalParameter( logInterval ) ? null : logInterval; logSizeLimit = isGlobalParameter( logSizeLimit ) ? null : logSizeLimit; } } }
package com.eden.orchid.api.publication; import com.caseyjbrooks.clog.Clog; import com.eden.orchid.api.OrchidContext; import com.eden.orchid.api.options.OptionsExtractor; import com.eden.orchid.testhelpers.BaseOrchidTest; import org.json.JSONObject; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.BiConsumer; import static org.hamcrest.MatcherAssert.*; import static org.hamcrest.Matchers.*; import static org.mockito.Mockito.*; public final class PublicationPipelineTest extends BaseOrchidTest { private OrchidContext context; private OptionsExtractor extractor; private PublicationPipeline underTest; private Set<OrchidPublisher> publishers; private MockPublisher.CrashingPublisher crashingPublisher; private MockPublisher.InvalidPublisher invalidPublisher; private MockPublisher.ValidPublisher validPublisher; private int progressUpdates; // Progress is considered complete when progress == maxProgress private boolean didProgressComplete; private BiConsumer<Integer, Integer> progressHandler; @BeforeEach public void setUp() { super.setUp(); progressUpdates = 0; didProgressComplete = false; progressHandler = (progress, maxProgress) -> { progressUpdates++; didProgressComplete = didProgressComplete || (progress.equals(maxProgress)); Clog.d("Progress: {}/{}", progress, maxProgress); }; context = mock(OrchidContext.class); extractor = mock(OptionsExtractor.class); when(context.resolve(OptionsExtractor.class)).thenReturn(extractor); publishers = new HashSet<>(); crashingPublisher = new MockPublisher.CrashingPublisher(context); invalidPublisher = new MockPublisher.InvalidPublisher(context); validPublisher = new MockPublisher.ValidPublisher(context); publishers.add(crashingPublisher); publishers.add(invalidPublisher); publishers.add(validPublisher); 
when(context.resolveSet(OrchidPublisher.class)).thenReturn(publishers); crashingPublisher = spy(crashingPublisher); invalidPublisher = spy(invalidPublisher); validPublisher = spy(validPublisher); when(context.resolve(MockPublisher.CrashingPublisher.class)).thenReturn(crashingPublisher); when(context.resolve(MockPublisher.InvalidPublisher.class)).thenReturn(invalidPublisher); when(context.resolve(MockPublisher.ValidPublisher.class)).thenReturn(validPublisher); underTest = new PublicationPipeline(context); } @Test public void testSetupCorrectly() { List<Map<String, Object>> stagesJson = new ArrayList<>(); stagesJson.add(new JSONObject("{\"type\": \"crashing\"}").toMap()); stagesJson.add(new JSONObject("{\"type\": \"invalid\"}").toMap()); stagesJson.add(new JSONObject("{\"type\": \"valid\"}").toMap()); underTest.initialize(stagesJson); underTest.publishAll(true); verify(crashingPublisher, times(1)).validate(); verify(invalidPublisher, times(1)).validate(); verify(validPublisher, times(1)).validate(); } @Test public void testPipelineStopsShortWhenStageIsInvalid() { List<Map<String, Object>> stagesJson = new ArrayList<>(); stagesJson.add(new JSONObject("{\"type\": \"crashing\"}").toMap()); stagesJson.add(new JSONObject("{\"type\": \"invalid\"}").toMap()); stagesJson.add(new JSONObject("{\"type\": \"valid\"}").toMap()); underTest.initialize(stagesJson); boolean success = underTest.publishAll(); verify(crashingPublisher, times(1)).validate(); verify(invalidPublisher, times(1)).validate(); verify(validPublisher, times(1)).validate(); verify(crashingPublisher, never()).publish(); verify(invalidPublisher, never()).publish(); verify(validPublisher, never()).publish(); assertThat(success, is(false)); } @Test public void testPipelineStopsShortWhenStageThrows() { List<Map<String, Object>> stagesJson = new ArrayList<>(); stagesJson.add(new JSONObject("{\"type\": \"crashing\"}").toMap()); stagesJson.add(new JSONObject("{\"type\": \"valid\"}").toMap()); 
underTest.initialize(stagesJson); boolean success = underTest.publishAll(); verify(crashingPublisher, times(1)).validate(); verify(validPublisher, times(1)).validate(); verify(crashingPublisher, times(1)).publish(); verify(validPublisher, times(0)).publish(); assertThat(success, is(false)); } @Test public void testPublishedWhenNotDry() { List<Map<String, Object>> stagesJson = new ArrayList<>(); stagesJson.add(new JSONObject("{\"type\": \"valid\"}").toMap()); underTest.initialize(stagesJson); boolean success = underTest.publishAll(); verify(validPublisher, times(1)).validate(); verify(validPublisher, times(1)).publish(); assertThat(success, is(true)); } @Test public void testNotPublishedWhenDry() { List<Map<String, Object>> stagesJson = new ArrayList<>(); stagesJson.add(new JSONObject("{\"type\": \"valid\"}").toMap()); underTest.initialize(stagesJson); boolean success = underTest.publishAll(true); verify(validPublisher, times(1)).validate(); verify(validPublisher, times(0)).publish(); assertThat(success, is(true)); } @Test public void testNotPublishedWhenPublisherIsDry() { List<Map<String, Object>> stagesJson = new ArrayList<>(); stagesJson.add(new JSONObject("{\"type\": \"valid\", \"dry\": true}").toMap()); underTest.initialize(stagesJson); boolean success = underTest.publishAll(); verify(validPublisher, times(1)).validate(); verify(validPublisher, times(0)).publish(); assertThat(success, is(true)); } @Test public void testNotPublishedWhenFailedValidation() { List<Map<String, Object>> stagesJson = new ArrayList<>(); stagesJson.add(new JSONObject("{\"type\": \"invalid\"}").toMap()); underTest.initialize(stagesJson); boolean success = underTest.publishAll(); verify(invalidPublisher, times(1)).validate(); verify(invalidPublisher, times(0)).publish(); assertThat(success, is(false)); } @Test public void testProgressUpdatesForValidPublishers() { List<Map<String, Object>> stagesJson = new ArrayList<>(); stagesJson.add(new JSONObject("{\"type\": \"valid\"}").toMap()); 
stagesJson.add(new JSONObject("{\"type\": \"valid\"}").toMap()); stagesJson.add(new JSONObject("{\"type\": \"valid\"}").toMap()); underTest.initialize(stagesJson); underTest.publishAll(false, progressHandler); verify(validPublisher, times(3)).validate(); // updates once at beginning for zero progress, then once for each subsequent progress assertThat(progressUpdates, is(equalTo(4))); assertThat(didProgressComplete, is(true)); } @Test public void testProgressUpdatesForInvalidPublishers() { List<Map<String, Object>> stagesJson = new ArrayList<>(); stagesJson.add(new JSONObject("{\"type\": \"valid\"}").toMap()); stagesJson.add(new JSONObject("{\"type\": \"invalid\"}").toMap()); stagesJson.add(new JSONObject("{\"type\": \"valid\"}").toMap()); underTest.initialize(stagesJson); underTest.publishAll(false, progressHandler); verify(validPublisher, times(2)).validate(); verify(invalidPublisher, times(1)).validate(); // updates once at beginning for zero progress, and once for completion, but does not update for each publisher assertThat(progressUpdates, is(equalTo(2))); assertThat(didProgressComplete, is(true)); } @Test public void testProgressUpdatesForCrashingPublishers() { List<Map<String, Object>> stagesJson = new ArrayList<>(); stagesJson.add(new JSONObject("{\"type\": \"valid\"}").toMap()); stagesJson.add(new JSONObject("{\"type\": \"crashing\"}").toMap()); stagesJson.add(new JSONObject("{\"type\": \"valid\"}").toMap()); underTest.initialize(stagesJson); underTest.publishAll(false, progressHandler); verify(validPublisher, times(2)).validate(); verify(crashingPublisher, times(1)).validate(); // updates once at beginning for zero progress, and once for completion, and once for each successful stage // deploy, but a progress update is not sent when a stage crashes, as the pipeline exits early and sends the // final 'completion' update immediately. assertThat(progressUpdates, is(equalTo(2))); assertThat(didProgressComplete, is(true)); } }
/*
 * Copyright (c) 2015 JBYoshi
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package jbyoshi.blockdodge;

import java.awt.*;
import java.awt.geom.*;
import java.util.*;

/**
 * A rectangular game object tracked by a {@link BlockDodgeGame}. Subclasses
 * define how it moves; this base class handles collision geometry and the
 * "explode into drops" effect.
 */
public abstract class DodgeShape {
	protected final BlockDodgeGame game;
	protected final Random rand = new Random();
	private final Color color;
	// Fraction of the shape's area used as the cap for each drop fragment.
	private static final float DROP_SCALE = 0.25f;
	// Number of live Drop fragments spawned from this shape; when it reaches
	// zero again, onFullyRemoved() fires (see Drop.onFullyRemoved below).
	private int dropCount = 0;
	final Rectangle2D.Double shape;

	/**
	 * Creates a shape at (x, y) with size w x h, drawn in color c and owned
	 * by the given game.
	 */
	public DodgeShape(BlockDodgeGame game, double x, double y, double w, double h, Color c) {
		this.game = game;
		this.shape = new Rectangle2D.Double(x, y, w, h);
		this.color = c;
	}

	/** Advances this shape by one tick; implemented by subclasses. */
	protected abstract void move();

	/**
	 * Removes this shape from the game, first tiling its rectangle into
	 * randomly-sized Drop fragments that fly off in random directions.
	 */
	public void explode() {
		int width = (int) shape.getWidth();
		int height = (int) shape.getHeight();
		if (width <= 0 || height <= 0) {
			// No room to explode. Just cancel.
			game.remove(this);
			return;
		}
		// Tracks which cells of the shape have already been assigned to a drop.
		boolean[][] used = new boolean[width][height];
		// Upper bound on the area of a single drop fragment.
		int maxArea = (int) (width * height * DROP_SCALE);
		while (true) {
			// Find the first cell not yet covered by a drop.
			int dropX = -1, dropY = -1;
			findPos: for (int x = 0; x < used.length; x++) {
				for (int y = 0; y < used[x].length; y++) {
					if (!used[x][y]) {
						dropX = x;
						dropY = y;
						break findPos;
					}
				}
			}
			if (dropX == -1 || dropY == -1) {
				// Every cell is covered; tiling is complete.
				break;
			}
			// Grow a rectangle [dropX..dropX2] x [dropY..dropY2] in random
			// directions until it hits the area cap or the shape's edge.
			int dropX2 = dropX, dropY2 = dropY;
			while ((dropX2 - dropX) * (dropY2 - dropY) < maxArea) {
				if (rand.nextBoolean()) {
					// Expand the width
					if (used.length == dropX2 + 1) {
						break;
					}
					// NOTE(review): this break exits only the scan loop below;
					// dropX2 is still incremented even when a used cell was
					// found in the new column — confirm this is intended.
					for (int y = dropY; y <= dropY2; y++) {
						if (used[dropX2][y]) {
							break;
						}
					}
					dropX2++;
				} else {
					// Expand the height
					if (used[0].length == dropY2 + 1) {
						break;
					}
					// NOTE(review): same pattern as above — the inner break
					// does not prevent dropY2++.
					for (int x = dropX; x <= dropX2; x++) {
						if (used[x][dropY2]) {
							break;
						}
					}
					dropY2++;
				}
			}
			// Mark the chosen rectangle as consumed.
			for (int x = dropX; x <= dropX2; x++) {
				for (int y = dropY; y <= dropY2; y++) {
					used[x][y] = true;
				}
			}
			// Launch the fragment in a uniformly random direction.
			float dir = (float) (rand.nextFloat() * 2 * Math.PI);
			game.add(new Drop(dropX, dropY, dropX2, dropY2, dir, 1));
		}
		game.remove(this);
	}

	/** Default collision response: blow this shape up. */
	protected void onCollided(DodgeShape other) {
		explode();
	}

	/** Hook invoked on death; no-op by default. */
	protected void onDeath() {
	}

	/** Hook invoked once this shape and all its drops are gone; no-op by default. */
	protected void onFullyRemoved() {
	}

	/**
	 * A fragment produced by {@link #explode()}. It bounces around, shrinks
	 * over time, and ignores collisions with sibling drops of the same parent.
	 */
	protected final class Drop extends BounceDodgeShape {
		// Tick counter; the drop shrinks every 5th tick.
		private int time = 0;

		/**
		 * Creates a fragment covering cells (x1, y1)..(x2, y2) of the parent
		 * shape, moving in direction dir at the given speed.
		 */
		protected Drop(int x1, int y1, int x2, int y2, float dir, double speed) {
			super(DodgeShape.this.game, DodgeShape.this.getX() + x1, DodgeShape.this.getY() + y1, x2 - x1 + 1,
					y2 - y1 + 1, DodgeShape.this.getColor(), dir, speed);
			DodgeShape.this.dropCount++;
		}

		@Override
		public void move() {
			super.move();
			if (++time % 5 == 0) {
				if (getWidth() == 0 && getHeight() == 0) {
					// Fully shrunk away; remove from the game.
					game.remove(this);
					return;
				}
				// Shrink one dimension, picked with probability proportional
				// to its current size; randomly shift so shrinking can happen
				// from either edge.
				int change = rand.nextInt((int) (getWidth() + getHeight()));
				if (change < getWidth()) {
					if (rand.nextBoolean()) {
						setX(getX() + 1);
					}
					setWidth(this, getWidth() - 1);
				} else {
					if (rand.nextBoolean()) {
						setY(getY() + 1);
					}
					setHeight(this, getHeight() - 1);
				}
			}
		}

		@Override
		protected void onCollided(DodgeShape other) {
			// Sibling drops from the same parent shape pass through each other.
			if (other instanceof Drop && ((Drop) other).outer() == outer()) {
				return;
			}
			super.onCollided(other);
		}

		/** The parent DodgeShape this drop fragment came from. */
		private DodgeShape outer() {
			return DodgeShape.this;
		}

		@Override
		public void explode() {
			// Drops do not fragment further; they just disappear.
			game.remove(this);
		}

		@Override
		protected void onFullyRemoved() {
			// Forward full removal to the parent once its last drop is gone.
			if (--DodgeShape.this.dropCount == 0) {
				DodgeShape.this.onFullyRemoved();
			}
		}
	}

	/** @return the number of live drops spawned from this shape */
	public int getDropCount() {
		return dropCount;
	}

	public double getX() {
		return shape.getX();
	}

	public double getY() {
		return shape.getY();
	}

	public double getWidth() {
		return shape.getWidth();
	}

	public double getHeight() {
		return shape.getHeight();
	}

	/** @return a defensive copy of this shape's bounding rectangle */
	public final Rectangle2D getShapeCopy() {
		return new Rectangle2D.Double(getX(), getY(), getWidth(), getHeight());
	}

	public Color getColor() {
		return color;
	}

	protected void setX(double x) {
		shape.x = x;
	}

	protected void setY(double y) {
		shape.y = y;
	}

	/**
	 * @return true if both shapes are still in the game and their rectangles
	 *         intersect
	 */
	public boolean collides(DodgeShape other) {
		return game.contains(this) && game.contains(other) && shape.intersects(other.shape);
	}

	// To get around compiler insanity. Used by Drop.
	static void setWidth(DodgeShape shape, double width) {
		shape.shape.width = width;
	}

	static void setHeight(DodgeShape shape, double height) {
		shape.shape.height = height;
	}
}
/*
 * Copyright 2000-2012 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.theoryinpractice.testng.inspection;

import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.codeInspection.BaseJavaLocalInspectionTool;
import com.intellij.codeInspection.ProblemsHolder;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.search.ProjectScope;
import com.intellij.psi.util.InheritanceUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * Inspection that flags TestNG {@code @Test} methods whose {@code expectedExceptions}
 * attribute names an exception type that is never thrown (explicitly or via a called
 * method's {@code throws} clause) anywhere in the method body.
 * <p>
 * Expected types that inherit from {@code java.lang.RuntimeException} are never flagged
 * — presumably because unchecked exceptions can arise without any visible
 * {@code throw}/{@code throws}, so their absence proves nothing.
 *
 * @author Bas Leijdekkers
 */
public class ExpectedExceptionNeverThrownTestNGInspection extends BaseJavaLocalInspectionTool {

  @NotNull
  @Override
  public PsiElementVisitor buildVisitor(@NotNull ProblemsHolder holder, boolean isOnTheFly) {
    return new ExpectedExceptionNeverThrownVisitor(holder);
  }

  /** Visitor that performs the per-method check and reports problems to the holder. */
  private static class ExpectedExceptionNeverThrownVisitor extends JavaElementVisitor {

    private final ProblemsHolder myProblemsHolder;

    public ExpectedExceptionNeverThrownVisitor(ProblemsHolder problemsHolder) {
      myProblemsHolder = problemsHolder;
    }

    @Override
    public void visitMethod(PsiMethod method) {
      super.visitMethod(method);
      // Only methods annotated with TestNG's @Test are of interest.
      final PsiAnnotation annotation = AnnotationUtil.findAnnotation(method, "org.testng.annotations.Test");
      if (annotation == null) {
        return;
      }
      // Locate the expectedExceptions attribute, if present.
      final PsiAnnotationParameterList parameterList = annotation.getParameterList();
      final PsiNameValuePair[] attributes = parameterList.getAttributes();
      PsiAnnotationMemberValue value = null;
      for (PsiNameValuePair attribute : attributes) {
        if ("expectedExceptions".equals(attribute.getName())) {
          value = attribute.getValue();
          break;
        }
      }
      // Only the single-class-literal form (expectedExceptions = Foo.class) is handled here;
      // an array initializer of several classes falls through this check and is not inspected.
      if (!(value instanceof PsiClassObjectAccessExpression)) {
        return;
      }
      final PsiCodeBlock body = method.getBody();
      if (body == null) {
        return;
      }
      final PsiClassObjectAccessExpression classObjectAccessExpression = (PsiClassObjectAccessExpression)value;
      final PsiTypeElement operand = classObjectAccessExpression.getOperand();
      final PsiType type = operand.getType();
      if (!(type instanceof PsiClassType)) {
        return;
      }
      final PsiClassType classType = (PsiClassType)type;
      final PsiClass aClass = classType.resolve();
      // Unchecked expectations are skipped (see class javadoc).
      if (InheritanceUtil.isInheritor(aClass, CommonClassNames.JAVA_LANG_RUNTIME_EXCEPTION)) {
        return;
      }
      final Set<PsiClassType> exceptionsThrown = calculateExceptionsThrown(body);
      // NOTE(review): this membership test relies on PsiClassType equality, whereas the
      // try/catch analysis below uses isAssignableFrom — a thrown SUBCLASS of the expected
      // type would not match here and the method would be flagged. Confirm the strictness
      // is intended.
      if (exceptionsThrown.contains(classType)) {
        return;
      }
      // #ref / #loc are IntelliJ problem-description placeholders for the highlighted
      // reference and its location.
      myProblemsHolder.registerProblem(operand, "Expected <code>#ref</code> never thrown in body of '" + method.getName() + "()' #loc");
    }
  }

  /**
   * Computes the set of exception types that can escape {@code element}: explicit
   * {@code throw} statements plus the {@code throws} clauses of resolved calls and
   * constructor invocations, with try/catch/finally handling applied (see
   * {@link ExceptionsThrownVisitor#visitTryStatement}).
   */
  @NotNull
  public static Set<PsiClassType> calculateExceptionsThrown(@NotNull PsiElement element) {
    final ExceptionsThrownVisitor visitor = new ExceptionsThrownVisitor();
    element.accept(visitor);
    return visitor.getExceptionsThrown();
  }

  /** Recursive visitor accumulating the exception types thrown within the visited tree. */
  private static class ExceptionsThrownVisitor extends JavaRecursiveElementVisitor {

    private final Set<PsiClassType> m_exceptionsThrown = new HashSet<PsiClassType>(4);

    @Override
    public void visitMethodCallExpression(@NotNull PsiMethodCallExpression expression) {
      super.visitMethodCallExpression(expression);
      // Declared (throws-clause) exceptions of the called method count as thrown.
      final PsiMethod method = expression.resolveMethod();
      collectExceptionsThrown(method, m_exceptionsThrown);
    }

    @Override
    public void visitNewExpression(@NotNull PsiNewExpression expression) {
      super.visitNewExpression(expression);
      // Constructors can declare checked exceptions too.
      final PsiMethod method = expression.resolveMethod();
      collectExceptionsThrown(method, m_exceptionsThrown);
    }

    @Override
    public void visitThrowStatement(PsiThrowStatement statement) {
      super.visitThrowStatement(statement);
      final PsiExpression exception = statement.getException();
      if (exception == null) {
        return;
      }
      final PsiType type = exception.getType();
      if (!(type instanceof PsiClassType)) {
        return;
      }
      m_exceptionsThrown.add((PsiClassType)type);
    }

    @Override
    public void visitTryStatement(@NotNull PsiTryStatement statement) {
      // Deliberately does NOT call super.visitTryStatement: the try/catch/finally parts
      // are analyzed individually below so that caught exceptions can be filtered out.
      final Set<PsiType> exceptionsHandled = getExceptionTypesHandled(statement);
      final PsiResourceList resourceList = statement.getResourceList();
      if (resourceList != null) {
        // try-with-resources: resource initializers AND the implicit close() both throw.
        final List<PsiResourceVariable> resourceVariables = resourceList.getResourceVariables();
        for (PsiResourceVariable resourceVariable : resourceVariables) {
          final Set<PsiClassType> resourceExceptions = calculateExceptionsThrown(resourceVariable);
          final PsiType type = resourceVariable.getType();
          if (type instanceof PsiClassType) {
            final PsiClassType classType = (PsiClassType)type;
            collectExceptionsThrown(findAutoCloseableCloseMethod(classType.resolve()), resourceExceptions);
          }
          for (PsiClassType resourceException : resourceExceptions) {
            if (!isExceptionHandled(exceptionsHandled, resourceException)) {
              m_exceptionsThrown.add(resourceException);
            }
          }
        }
      }
      final PsiCodeBlock tryBlock = statement.getTryBlock();
      if (tryBlock != null) {
        // Exceptions from the try block escape only if no catch parameter covers them.
        final Set<PsiClassType> tryExceptions = calculateExceptionsThrown(tryBlock);
        for (PsiClassType tryException : tryExceptions) {
          if (!isExceptionHandled(exceptionsHandled, tryException)) {
            m_exceptionsThrown.add(tryException);
          }
        }
      }
      final PsiCodeBlock finallyBlock = statement.getFinallyBlock();
      if (finallyBlock != null) {
        // finally-block exceptions always escape the statement.
        final Set<PsiClassType> finallyExceptions = calculateExceptionsThrown(finallyBlock);
        m_exceptionsThrown.addAll(finallyExceptions);
      }
      final PsiCodeBlock[] catchBlocks = statement.getCatchBlocks();
      for (PsiCodeBlock catchBlock : catchBlocks) {
        // Exceptions thrown from within catch handlers also escape.
        final Set<PsiClassType> catchExceptions = calculateExceptionsThrown(catchBlock);
        m_exceptionsThrown.addAll(catchExceptions);
      }
    }

    /**
     * Adds every exception type from {@code method}'s {@code throws} clause to {@code out}.
     * No-op when the call did not resolve.
     */
    private static void collectExceptionsThrown(@Nullable PsiMethod method, @NotNull Set<PsiClassType> out) {
      if (method == null) {
        return;
      }
      final PsiElementFactory factory = JavaPsiFacade.getElementFactory(method.getProject());
      final PsiJavaCodeReferenceElement[] referenceElements = method.getThrowsList().getReferenceElements();
      for (PsiJavaCodeReferenceElement referenceElement : referenceElements) {
        final PsiClass exceptionClass = (PsiClass)referenceElement.resolve();
        if (exceptionClass != null) {
          out.add(factory.createType(exceptionClass));
        }
      }
    }

    /**
     * Resolves the {@code close()} override that a try-with-resources statement would
     * invoke on {@code aClass}, or {@code null} when the class is not an AutoCloseable
     * or the library class cannot be found.
     */
    @Nullable
    private static PsiMethod findAutoCloseableCloseMethod(@Nullable PsiClass aClass) {
      if (aClass == null || !InheritanceUtil.isInheritor(aClass, CommonClassNames.JAVA_LANG_AUTO_CLOSEABLE)) {
        return null;
      }
      final Project project = aClass.getProject();
      final JavaPsiFacade facade = JavaPsiFacade.getInstance(project);
      final PsiClass autoCloseable = facade.findClass(CommonClassNames.JAVA_LANG_AUTO_CLOSEABLE, ProjectScope.getLibrariesScope(project));
      if (autoCloseable == null) {
        return null;
      }
      // AutoCloseable declares exactly one method, close(); [0] is safe here.
      final PsiMethod closeMethod = autoCloseable.findMethodsByName("close", false)[0];
      return aClass.findMethodBySignature(closeMethod, true);
    }

    /** True when some catch parameter type is assignable from {@code thrownType}. */
    private static boolean isExceptionHandled(Iterable<PsiType> exceptionsHandled, PsiType thrownType) {
      for (PsiType exceptionHandled : exceptionsHandled) {
        if (exceptionHandled.isAssignableFrom(thrownType)) {
          return true;
        }
      }
      return false;
    }

    /** Collects the catch-parameter types of {@code statement}. */
    private static Set<PsiType> getExceptionTypesHandled(@NotNull PsiTryStatement statement) {
      final Set<PsiType> out = new HashSet<PsiType>(5);
      final PsiParameter[] parameters = statement.getCatchBlockParameters();
      for (PsiParameter parameter : parameters) {
        final PsiType type = parameter.getType();
        out.add(type);
      }
      return out;
    }

    /** Returns the (mutable) accumulated set; callers treat it as the visitor's result. */
    @NotNull
    public Set<PsiClassType> getExceptionsThrown() {
      return m_exceptionsThrown;
    }
  }
}
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.config.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/config-2014-11-12/DescribeRemediationExecutionStatus" * target="_top">AWS API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class DescribeRemediationExecutionStatusRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * A list of AWS Config rule names. * </p> */ private String configRuleName; /** * <p> * A list of resource keys to be processed with the current request. Each element in the list consists of the * resource type and resource ID. * </p> */ private com.amazonaws.internal.SdkInternalList<ResourceKey> resourceKeys; /** * <p> * The maximum number of RemediationExecutionStatuses returned on each page. The default is maximum. If you specify * 0, AWS Config uses the default. * </p> */ private Integer limit; /** * <p> * The <code>nextToken</code> string returned on a previous page that you use to get the next page of results in a * paginated response. * </p> */ private String nextToken; /** * <p> * A list of AWS Config rule names. * </p> * * @param configRuleName * A list of AWS Config rule names. 
*/ public void setConfigRuleName(String configRuleName) { this.configRuleName = configRuleName; } /** * <p> * A list of AWS Config rule names. * </p> * * @return A list of AWS Config rule names. */ public String getConfigRuleName() { return this.configRuleName; } /** * <p> * A list of AWS Config rule names. * </p> * * @param configRuleName * A list of AWS Config rule names. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeRemediationExecutionStatusRequest withConfigRuleName(String configRuleName) { setConfigRuleName(configRuleName); return this; } /** * <p> * A list of resource keys to be processed with the current request. Each element in the list consists of the * resource type and resource ID. * </p> * * @return A list of resource keys to be processed with the current request. Each element in the list consists of * the resource type and resource ID. */ public java.util.List<ResourceKey> getResourceKeys() { if (resourceKeys == null) { resourceKeys = new com.amazonaws.internal.SdkInternalList<ResourceKey>(); } return resourceKeys; } /** * <p> * A list of resource keys to be processed with the current request. Each element in the list consists of the * resource type and resource ID. * </p> * * @param resourceKeys * A list of resource keys to be processed with the current request. Each element in the list consists of the * resource type and resource ID. */ public void setResourceKeys(java.util.Collection<ResourceKey> resourceKeys) { if (resourceKeys == null) { this.resourceKeys = null; return; } this.resourceKeys = new com.amazonaws.internal.SdkInternalList<ResourceKey>(resourceKeys); } /** * <p> * A list of resource keys to be processed with the current request. Each element in the list consists of the * resource type and resource ID. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). 
Use * {@link #setResourceKeys(java.util.Collection)} or {@link #withResourceKeys(java.util.Collection)} if you want to * override the existing values. * </p> * * @param resourceKeys * A list of resource keys to be processed with the current request. Each element in the list consists of the * resource type and resource ID. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeRemediationExecutionStatusRequest withResourceKeys(ResourceKey... resourceKeys) { if (this.resourceKeys == null) { setResourceKeys(new com.amazonaws.internal.SdkInternalList<ResourceKey>(resourceKeys.length)); } for (ResourceKey ele : resourceKeys) { this.resourceKeys.add(ele); } return this; } /** * <p> * A list of resource keys to be processed with the current request. Each element in the list consists of the * resource type and resource ID. * </p> * * @param resourceKeys * A list of resource keys to be processed with the current request. Each element in the list consists of the * resource type and resource ID. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeRemediationExecutionStatusRequest withResourceKeys(java.util.Collection<ResourceKey> resourceKeys) { setResourceKeys(resourceKeys); return this; } /** * <p> * The maximum number of RemediationExecutionStatuses returned on each page. The default is maximum. If you specify * 0, AWS Config uses the default. * </p> * * @param limit * The maximum number of RemediationExecutionStatuses returned on each page. The default is maximum. If you * specify 0, AWS Config uses the default. */ public void setLimit(Integer limit) { this.limit = limit; } /** * <p> * The maximum number of RemediationExecutionStatuses returned on each page. The default is maximum. If you specify * 0, AWS Config uses the default. * </p> * * @return The maximum number of RemediationExecutionStatuses returned on each page. The default is maximum. 
If you * specify 0, AWS Config uses the default. */ public Integer getLimit() { return this.limit; } /** * <p> * The maximum number of RemediationExecutionStatuses returned on each page. The default is maximum. If you specify * 0, AWS Config uses the default. * </p> * * @param limit * The maximum number of RemediationExecutionStatuses returned on each page. The default is maximum. If you * specify 0, AWS Config uses the default. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeRemediationExecutionStatusRequest withLimit(Integer limit) { setLimit(limit); return this; } /** * <p> * The <code>nextToken</code> string returned on a previous page that you use to get the next page of results in a * paginated response. * </p> * * @param nextToken * The <code>nextToken</code> string returned on a previous page that you use to get the next page of results * in a paginated response. */ public void setNextToken(String nextToken) { this.nextToken = nextToken; } /** * <p> * The <code>nextToken</code> string returned on a previous page that you use to get the next page of results in a * paginated response. * </p> * * @return The <code>nextToken</code> string returned on a previous page that you use to get the next page of * results in a paginated response. */ public String getNextToken() { return this.nextToken; } /** * <p> * The <code>nextToken</code> string returned on a previous page that you use to get the next page of results in a * paginated response. * </p> * * @param nextToken * The <code>nextToken</code> string returned on a previous page that you use to get the next page of results * in a paginated response. * @return Returns a reference to this object so that method calls can be chained together. */ public DescribeRemediationExecutionStatusRequest withNextToken(String nextToken) { setNextToken(nextToken); return this; } /** * Returns a string representation of this object. 
This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getConfigRuleName() != null) sb.append("ConfigRuleName: ").append(getConfigRuleName()).append(","); if (getResourceKeys() != null) sb.append("ResourceKeys: ").append(getResourceKeys()).append(","); if (getLimit() != null) sb.append("Limit: ").append(getLimit()).append(","); if (getNextToken() != null) sb.append("NextToken: ").append(getNextToken()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof DescribeRemediationExecutionStatusRequest == false) return false; DescribeRemediationExecutionStatusRequest other = (DescribeRemediationExecutionStatusRequest) obj; if (other.getConfigRuleName() == null ^ this.getConfigRuleName() == null) return false; if (other.getConfigRuleName() != null && other.getConfigRuleName().equals(this.getConfigRuleName()) == false) return false; if (other.getResourceKeys() == null ^ this.getResourceKeys() == null) return false; if (other.getResourceKeys() != null && other.getResourceKeys().equals(this.getResourceKeys()) == false) return false; if (other.getLimit() == null ^ this.getLimit() == null) return false; if (other.getLimit() != null && other.getLimit().equals(this.getLimit()) == false) return false; if (other.getNextToken() == null ^ this.getNextToken() == null) return false; if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getConfigRuleName() == null) ? 
0 : getConfigRuleName().hashCode()); hashCode = prime * hashCode + ((getResourceKeys() == null) ? 0 : getResourceKeys().hashCode()); hashCode = prime * hashCode + ((getLimit() == null) ? 0 : getLimit().hashCode()); hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode()); return hashCode; } @Override public DescribeRemediationExecutionStatusRequest clone() { return (DescribeRemediationExecutionStatusRequest) super.clone(); } }
package org.deeplearning4j.optimize.listeners;

import lombok.Getter;
import lombok.NonNull;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.deeplearning4j.eval.Evaluation;
import org.deeplearning4j.eval.IEvaluation;
import org.deeplearning4j.exception.DL4JInvalidInputException;
import org.deeplearning4j.nn.api.Model;
import org.deeplearning4j.nn.graph.ComputationGraph;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.optimize.api.InvocationType;
import org.deeplearning4j.optimize.api.TrainingListener;
import org.deeplearning4j.optimize.listeners.callbacks.EvaluationCallback;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.dataset.MultiDataSet;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
import org.nd4j.linalg.dataset.api.iterator.MultiDataSetIterator;

import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;

/**
 * This TrainingListener implementation provides simple way for model evaluation during training.
 * It can be launched every Xth Iteration/Epoch, depending on frequency and InvocationType constructor arguments
 *
 * <p>Exactly one evaluation data source is used per instance: a {@link DataSetIterator},
 * a {@link MultiDataSetIterator}, a single {@link DataSet}, or a single {@link MultiDataSet},
 * depending on which constructor was called.
 *
 * @author raver119@gmail.com
 */
@Slf4j
public class EvaluativeListener implements TrainingListener {
    // Per-thread invocation counter used to apply the frequency gate.
    // NOTE(review): transient — after Java deserialization this field is null, and
    // invokeListener() would NPE on iterationCount.get() before its lazy re-init runs;
    // confirm whether this listener is ever serialized with a model.
    protected transient ThreadLocal<AtomicLong> iterationCount = new ThreadLocal<>();
    // Evaluate every Nth invocation of the configured trigger (iteration or epoch).
    protected int frequency;
    // Total number of evaluations actually launched (shared across threads).
    protected AtomicLong invocationCount = new AtomicLong(0);

    // Exactly one of the following four data sources is non-null (see constructors).
    protected transient DataSetIterator dsIterator;
    protected transient MultiDataSetIterator mdsIterator;
    protected DataSet ds;
    protected MultiDataSet mds;

    // Evaluation metrics to compute; reset before every run.
    @Getter
    protected IEvaluation[] evaluations;

    // Whether evaluation fires on iteration end, epoch start, or epoch end.
    @Getter
    protected InvocationType invocationType;

    /**
     * This callback will be invoked after evaluation finished
     */
    @Getter
    @Setter
    protected transient EvaluationCallback callback;

    /**
     * Evaluation will be launched after each *frequency* iteration
     *
     * @param iterator  data source to evaluate on
     * @param frequency evaluate every Nth iteration
     */
    public EvaluativeListener(@NonNull DataSetIterator iterator, int frequency) {
        this(iterator, frequency, InvocationType.ITERATION_END, new Evaluation());
    }

    /**
     * @param iterator  data source to evaluate on
     * @param frequency evaluate every Nth trigger
     * @param type      which training event triggers evaluation
     */
    public EvaluativeListener(@NonNull DataSetIterator iterator, int frequency, @NonNull InvocationType type) {
        this(iterator, frequency, type, new Evaluation());
    }

    /**
     * Evaluation will be launched after each *frequency* iteration
     *
     * @param iterator  data source to evaluate on
     * @param frequency evaluate every Nth iteration
     */
    public EvaluativeListener(@NonNull MultiDataSetIterator iterator, int frequency) {
        this(iterator, frequency, InvocationType.ITERATION_END, new Evaluation());
    }

    /**
     * @param iterator  data source to evaluate on
     * @param frequency evaluate every Nth trigger
     * @param type      which training event triggers evaluation
     */
    public EvaluativeListener(@NonNull MultiDataSetIterator iterator, int frequency, @NonNull InvocationType type) {
        this(iterator, frequency, type, new Evaluation());
    }

    /**
     * Evaluation will be launched after each *frequency* iteration
     *
     * @param iterator    data source to evaluate on
     * @param frequency   evaluate every Nth iteration
     * @param evaluations metrics to compute
     */
    public EvaluativeListener(@NonNull DataSetIterator iterator, int frequency, IEvaluation... evaluations) {
        this(iterator, frequency, InvocationType.ITERATION_END, evaluations);
    }

    /**
     * @param iterator    data source to evaluate on
     * @param frequency   evaluate every Nth trigger
     * @param type        which training event triggers evaluation
     * @param evaluations metrics to compute
     */
    public EvaluativeListener(@NonNull DataSetIterator iterator, int frequency, @NonNull InvocationType type,
                    IEvaluation... evaluations) {
        this.dsIterator = iterator;
        this.frequency = frequency;
        this.evaluations = evaluations;
        this.invocationType = type;
    }

    /**
     * Evaluation will be launched after each *frequency* iteration
     *
     * @param iterator    data source to evaluate on
     * @param frequency   evaluate every Nth iteration
     * @param evaluations metrics to compute
     */
    public EvaluativeListener(@NonNull MultiDataSetIterator iterator, int frequency, IEvaluation... evaluations) {
        this(iterator, frequency, InvocationType.ITERATION_END, evaluations);
    }

    /**
     * @param iterator    data source to evaluate on
     * @param frequency   evaluate every Nth trigger
     * @param type        which training event triggers evaluation
     * @param evaluations metrics to compute
     */
    public EvaluativeListener(@NonNull MultiDataSetIterator iterator, int frequency, @NonNull InvocationType type,
                    IEvaluation... evaluations) {
        this.mdsIterator = iterator;
        this.frequency = frequency;
        this.evaluations = evaluations;
        this.invocationType = type;
    }

    /**
     * @param dataSet   single dataset to evaluate on
     * @param frequency evaluate every Nth trigger
     * @param type      which training event triggers evaluation
     */
    public EvaluativeListener(@NonNull DataSet dataSet, int frequency, @NonNull InvocationType type) {
        this(dataSet, frequency, type, new Evaluation());
    }

    /**
     * @param multiDataSet single multi-dataset to evaluate on
     * @param frequency    evaluate every Nth trigger
     * @param type         which training event triggers evaluation
     */
    public EvaluativeListener(@NonNull MultiDataSet multiDataSet, int frequency, @NonNull InvocationType type) {
        this(multiDataSet, frequency, type, new Evaluation());
    }

    /**
     * @param dataSet     single dataset to evaluate on
     * @param frequency   evaluate every Nth trigger
     * @param type        which training event triggers evaluation
     * @param evaluations metrics to compute
     */
    public EvaluativeListener(@NonNull DataSet dataSet, int frequency, @NonNull InvocationType type,
                    IEvaluation... evaluations) {
        this.ds = dataSet;
        this.frequency = frequency;
        this.evaluations = evaluations;
        this.invocationType = type;
    }

    /**
     * @param multiDataSet single multi-dataset to evaluate on
     * @param frequency    evaluate every Nth trigger
     * @param type         which training event triggers evaluation
     * @param evaluations  metrics to compute
     */
    public EvaluativeListener(@NonNull MultiDataSet multiDataSet, int frequency, @NonNull InvocationType type,
                    IEvaluation... evaluations) {
        this.mds = multiDataSet;
        this.frequency = frequency;
        this.evaluations = evaluations;
        this.invocationType = type;
    }

    /**
     * Event listener for each iteration
     *
     * @param model the model iterating
     * @param iteration the iteration
     */
    @Override
    public void iterationDone(Model model, int iteration, int epoch) {
        // no-op
        // NOTE(review): ITERATION_END evaluations are fired from onBackwardPass() below
        // rather than here — presumably both fire once per training iteration; confirm
        // against the TrainingListener invocation contract.
    }

    @Override
    public void onEpochStart(Model model) {
        if (invocationType == InvocationType.EPOCH_START)
            invokeListener(model);
    }

    @Override
    public void onEpochEnd(Model model) {
        if (invocationType == InvocationType.EPOCH_END)
            invokeListener(model);
    }

    @Override
    public void onForwardPass(Model model, List<INDArray> activations) {
        // no-op
    }

    @Override
    public void onForwardPass(Model model, Map<String, INDArray> activations) {
        // no-op
    }

    @Override
    public void onGradientCalculation(Model model) {
        // no-op
    }

    @Override
    public void onBackwardPass(Model model) {
        if (invocationType == InvocationType.ITERATION_END)
            invokeListener(model);
    }

    /**
     * Runs one evaluation pass if the per-thread frequency gate allows it: resets all
     * metrics, resets the data iterator (when supported), dispatches to the appropriate
     * evaluation path for the model type, logs the results, and finally notifies the
     * optional {@link EvaluationCallback}.
     *
     * @param model model being trained; must be a MultiLayerNetwork or ComputationGraph
     * @throws DL4JInvalidInputException for any other model type
     */
    protected void invokeListener(Model model) {
        // Lazily create the per-thread counter (also repopulates after the transient
        // field survives in-JVM but the ThreadLocal has no value for this thread).
        if (iterationCount.get() == null)
            iterationCount.set(new AtomicLong(0));

        // Frequency gate: evaluates on invocation 0, frequency, 2*frequency, ...
        if (iterationCount.get().getAndIncrement() % frequency != 0)
            return;

        // Clear accumulated state from any previous evaluation run.
        for (IEvaluation evaluation : evaluations)
            evaluation.reset();

        // Rewind whichever iterator is configured, when rewinding is supported.
        if (dsIterator != null && dsIterator.resetSupported())
            dsIterator.reset();
        else if (mdsIterator != null && mdsIterator.resetSupported())
            mdsIterator.reset();

        // FIXME: we need to save/restore inputs, if we're being invoked with iterations > 1
        log.info("Starting evaluation nr. {}", invocationCount.incrementAndGet());
        if (model instanceof MultiLayerNetwork) {
            if (dsIterator != null) {
                ((MultiLayerNetwork) model).doEvaluation(dsIterator, evaluations);
            } else if (ds != null) {
                // Single-DataSet path: evaluate the network output directly.
                for (IEvaluation evaluation : evaluations)
                    evaluation.eval(ds.getLabels(), ((MultiLayerNetwork) model).output(ds.getFeatureMatrix()));
            }
            // NOTE(review): a MultiLayerNetwork constructed with a MultiDataSet(Iterator)
            // source falls through both branches and silently evaluates nothing.
        } else if (model instanceof ComputationGraph) {
            if (dsIterator != null) {
                ((ComputationGraph) model).doEvaluation(dsIterator, evaluations);
            } else if (mdsIterator != null) {
                ((ComputationGraph) model).doEvaluation(mdsIterator, evaluations);
            } else if (ds != null) {
                // Graph output() returns an array of outputs; compare output/label index 0.
                for (IEvaluation evaluation : evaluations)
                    evalAtIndex(evaluation, new INDArray[] {ds.getLabels()},
                                    ((ComputationGraph) model).output(ds.getFeatureMatrix()), 0);
            } else if (mds != null) {
                for (IEvaluation evaluation : evaluations)
                    evalAtIndex(evaluation, mds.getLabels(), ((ComputationGraph) model).output(mds.getFeatures()), 0);
            }
        } else
            throw new DL4JInvalidInputException("Model is unknown: " + model.getClass().getCanonicalName());

        // TODO: maybe something better should be used here?
        log.info("Reporting evaluation results:");
        for (IEvaluation evaluation : evaluations)
            log.info("{}:\n{}", evaluation.getClass().getSimpleName(), evaluation.stats());


        if (callback != null)
            callback.call(this, model, invocationCount.get(), evaluations);
    }

    /**
     * Applies a single evaluation to the labels/predictions pair at {@code index}.
     * Used by the ComputationGraph paths, which produce arrays of outputs.
     */
    protected void evalAtIndex(IEvaluation evaluation, INDArray[] labels, INDArray[] predictions, int index) {
        evaluation.eval(labels[index], predictions[index]);
    }
}
/* * Copyright 2014 Red Hat, Inc. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * and Apache License v2.0 which accompanies this distribution. * * The Eclipse Public License is available at * http://www.eclipse.org/legal/epl-v10.html * * The Apache License v2.0 is available at * http://www.opensource.org/licenses/apache2.0.php * * You may elect to redistribute this code under either of these licenses. */ package examples; import io.vertx.core.Vertx; import io.vertx.core.json.JsonArray; import io.vertx.core.json.JsonObject; import io.vertx.docgen.Source; import io.vertx.ext.mongo.FindOptions; import io.vertx.ext.mongo.MongoClient; import io.vertx.ext.mongo.UpdateOptions; import org.bson.types.ObjectId; import java.util.List; /** * @author <a href="http://tfox.org">Tim Fox</a> */ public class MongoClientExamples { public void exampleCreateDefault(Vertx vertx, JsonObject config) { MongoClient client = MongoClient.createShared(vertx, config); } public void exampleCreatePoolName(Vertx vertx, JsonObject config) { MongoClient client = MongoClient.createShared(vertx, config, "MyPoolName"); } public void exampleCreateNonShared(Vertx vertx, JsonObject config) { MongoClient client = MongoClient.createNonShared(vertx, config); } public void example1(MongoClient mongoClient) { // Document has no id JsonObject document = new JsonObject() .put("title", "The Hobbit"); mongoClient.save("books", document, res -> { if (res.succeeded()) { String id = res.result(); System.out.println("Saved book with id " + id); } else { res.cause().printStackTrace(); } }); } public void example2(MongoClient mongoClient) { // Document has an id already JsonObject document = new JsonObject() .put("title", "The Hobbit") .put("_id", "123244"); mongoClient.save("books", document, res -> { if (res.succeeded()) { // ... 
} else { res.cause().printStackTrace(); } }); } public void example3(MongoClient mongoClient) { // Document has an id already JsonObject document = new JsonObject() .put("title", "The Hobbit"); mongoClient.insert("books", document, res -> { if (res.succeeded()) { String id = res.result(); System.out.println("Inserted book with id " + id); } else { res.cause().printStackTrace(); } }); } public void example4(MongoClient mongoClient) { // Document has an id already JsonObject document = new JsonObject() .put("title", "The Hobbit") .put("_id", "123244"); mongoClient.insert("books", document, res -> { if (res.succeeded()) { //... } else { // Will fail if the book with that id already exists. } }); } public void example5(MongoClient mongoClient) { // Match any documents with title=The Hobbit JsonObject query = new JsonObject() .put("title", "The Hobbit"); // Set the author field JsonObject update = new JsonObject().put("$set", new JsonObject() .put("author", "J. R. R. Tolkien")); mongoClient.updateCollection("books", query, update, res -> { if (res.succeeded()) { System.out.println("Book updated !"); } else { res.cause().printStackTrace(); } }); } public void example6(MongoClient mongoClient) { // Match any documents with title=The Hobbit JsonObject query = new JsonObject() .put("title", "The Hobbit"); // Set the author field JsonObject update = new JsonObject().put("$set", new JsonObject() .put("author", "J. R. R. Tolkien")); UpdateOptions options = new UpdateOptions().setMulti(true); mongoClient.updateCollectionWithOptions("books", query, update, options, res -> { if (res.succeeded()) { System.out.println("Book updated !"); } else { res.cause().printStackTrace(); } }); } public void example7(MongoClient mongoClient) { JsonObject query = new JsonObject() .put("title", "The Hobbit"); JsonObject replace = new JsonObject() .put("title", "The Lord of the Rings") .put("author", "J. R. R. 
Tolkien"); mongoClient.replaceDocuments("books", query, replace, res -> { if (res.succeeded()) { System.out.println("Book replaced !"); } else { res.cause().printStackTrace(); } }); } public void example8(MongoClient mongoClient) { // empty query = match any JsonObject query = new JsonObject(); mongoClient.find("books", query, res -> { if (res.succeeded()) { for (JsonObject json : res.result()) { System.out.println(json.encodePrettily()); } } else { res.cause().printStackTrace(); } }); } public void example9(MongoClient mongoClient) { // will match all Tolkien books JsonObject query = new JsonObject() .put("author", "J. R. R. Tolkien"); mongoClient.find("books", query, res -> { if (res.succeeded()) { for (JsonObject json : res.result()) { System.out.println(json.encodePrettily()); } } else { res.cause().printStackTrace(); } }); } public void findBatch(MongoClient mongoClient) { // will match all Tolkien books JsonObject query = new JsonObject() .put("author", "J. R. R. Tolkien"); mongoClient.findBatch("book", query) .exceptionHandler(throwable -> throwable.printStackTrace()) .endHandler(v -> System.out.println("End of research")) .handler(doc -> System.out.println("Found doc: " + doc.encodePrettily())); } public void findBatchWithOptions(MongoClient mongoClient) { // will match all Tolkien books JsonObject query = new JsonObject() .put("author", "J. R. R. Tolkien"); FindOptions options = new FindOptions().setBatchSize(100); mongoClient.findBatchWithOptions("book", query, options) .exceptionHandler(throwable -> throwable.printStackTrace()) .endHandler(v -> System.out.println("End of research")) .handler(doc -> System.out.println("Found doc: " + doc.encodePrettily())); } public void example10(MongoClient mongoClient) { JsonObject query = new JsonObject() .put("author", "J. R. R. 
Tolkien"); mongoClient.removeDocuments("books", query, res -> { if (res.succeeded()) { System.out.println("Never much liked Tolkien stuff!"); } else { res.cause().printStackTrace(); } }); } public void example11(MongoClient mongoClient) { JsonObject query = new JsonObject() .put("author", "J. R. R. Tolkien"); mongoClient.count("books", query, res -> { if (res.succeeded()) { long num = res.result(); } else { res.cause().printStackTrace(); } }); } public void example11_1(MongoClient mongoClient) { mongoClient.getCollections(res -> { if (res.succeeded()) { List<String> collections = res.result(); } else { res.cause().printStackTrace(); } }); } public void example11_2(MongoClient mongoClient) { mongoClient.createCollection("mynewcollectionr", res -> { if (res.succeeded()) { // Created ok! } else { res.cause().printStackTrace(); } }); } public void example11_3(MongoClient mongoClient) { mongoClient.dropCollection("mynewcollectionr", res -> { if (res.succeeded()) { // Dropped ok! } else { res.cause().printStackTrace(); } }); } public void example12(MongoClient mongoClient) { JsonObject command = new JsonObject() .put("aggregate", "collection_name") .put("pipeline", new JsonArray()); mongoClient.runCommand("aggregate", command, res -> { if (res.succeeded()) { JsonArray resArr = res.result().getJsonArray("result"); // etc } else { res.cause().printStackTrace(); } }); } public void example13_0(MongoClient mongoService) { JsonObject document = new JsonObject() .put("title", "The Hobbit") //ISO-8601 date .put("publicationDate", new JsonObject().put("$date", "1937-09-21T00:00:00+00:00")); mongoService.save("publishedBooks", document, res -> { if (res.succeeded()) { String id = res.result(); mongoService.findOne("publishedBooks", new JsonObject().put("_id", id), null, res2 -> { if (res2.succeeded()) { System.out.println("To retrieve ISO-8601 date : " + res2.result().getJsonObject("publicationDate").getString("$date")); } else { res2.cause().printStackTrace(); } }); } else { 
res.cause().printStackTrace(); } }); } @Source(translate = false) public void example14_01_dl(MongoClient mongoService) { //This could be a serialized object or the contents of a pdf file, etc, in real life byte[] binaryObject = new byte[40]; JsonObject document = new JsonObject() .put("name", "Alan Turing") .put("binaryStuff", new JsonObject().put("$binary", binaryObject)); mongoService.save("smartPeople", document, res -> { if (res.succeeded()) { String id = res.result(); mongoService.findOne("smartPeople", new JsonObject().put("_id", id), null, res2 -> { if (res2.succeeded()) { byte[] reconstitutedBinaryObject = res2.result().getJsonObject("binaryStuff").getBinary("$binary"); //This could now be de-serialized into an object in real life } else { res2.cause().printStackTrace(); } }); } else { res.cause().printStackTrace(); } }); } public void example14_02_dl(MongoClient mongoService) { //This could be a the byte contents of a pdf file, etc converted to base 64 String base64EncodedString = "a2FpbHVhIGlzIHRoZSAjMSBiZWFjaCBpbiB0aGUgd29ybGQ="; JsonObject document = new JsonObject() .put("name", "Alan Turing") .put("binaryStuff", new JsonObject().put("$binary", base64EncodedString)); mongoService.save("smartPeople", document, res -> { if (res.succeeded()) { String id = res.result(); mongoService.findOne("smartPeople", new JsonObject().put("_id", id), null, res2 -> { if (res2.succeeded()) { String reconstitutedBase64EncodedString = res2.result().getJsonObject("binaryStuff").getString("$binary"); //This could now converted back to bytes from the base 64 string } else { res2.cause().printStackTrace(); } }); } else { res.cause().printStackTrace(); } }); } public void example15_dl(MongoClient mongoService) { String individualId = new ObjectId().toHexString(); JsonObject document = new JsonObject() .put("name", "Stephen Hawking") .put("individualId", new JsonObject().put("$oid", individualId)); mongoService.save("smartPeople", document, res -> { if (res.succeeded()) { 
String id = res.result(); JsonObject query = new JsonObject().put("_id", id); mongoService.findOne("smartPeople", query, null, res2 -> { if (res2.succeeded()) { String reconstitutedIndividualId = res2.result() .getJsonObject("individualId").getString("$oid"); } else { res2.cause().printStackTrace(); } }); } else { res.cause().printStackTrace(); } }); } public void example16(MongoClient mongoClient) { JsonObject document = new JsonObject() .put("title", "The Hobbit"); mongoClient.save("books", document, res -> { if (res.succeeded()) { mongoClient.distinct("books", "title", String.class.getName(), res2 -> { System.out.println("Title is : " + res2.result().getJsonArray(0)); }); } else { res.cause().printStackTrace(); } }); } public void example16_d1(MongoClient mongoClient) { JsonObject document = new JsonObject() .put("title", "The Hobbit"); mongoClient.save("books", document, res -> { if (res.succeeded()) { mongoClient.distinctBatch("books", "title", String.class.getName()) .handler(book -> System.out.println("Title is : " + book.getString("title"))); } else { res.cause().printStackTrace(); } }); } public void example17(MongoClient mongoClient) { JsonObject document = new JsonObject() .put("title", "The Hobbit") .put("publicationDate", new JsonObject().put("$date", "1937-09-21T00:00:00+00:00")); JsonObject query = new JsonObject() .put("publicationDate", new JsonObject().put("$gte", new JsonObject().put("$date", "1937-09-21T00:00:00+00:00"))); mongoClient.save("books", document, res -> { if (res.succeeded()) { mongoClient.distinctWithQuery("books", "title", String.class.getName(), query, res2 -> { System.out.println("Title is : " + res2.result().getJsonArray(0)); }); } }); } public void example17_d1(MongoClient mongoClient) { JsonObject document = new JsonObject() .put("title", "The Hobbit") .put("publicationDate", new JsonObject().put("$date", "1937-09-21T00:00:00+00:00")); JsonObject query = new JsonObject() .put("publicationDate", new JsonObject() .put("$gte", 
new JsonObject().put("$date", "1937-09-21T00:00:00+00:00"))); mongoClient.save("books", document, res -> { if (res.succeeded()) { mongoClient.distinctBatchWithQuery("books", "title", String.class.getName(), query) .handler(book -> System.out.println("Title is : " + book.getString("title"))); } }); } }
/*************************************************************************** * * * Organization: Lawrence Livermore National Lab (LLNL) * * Directorate: Computation * * Department: Computing Applications and Research * * Division: S&T Global Security * * Matrix: Atmospheric, Earth and Energy Division * * Program: PCMDI * * Project: Earth Systems Grid Federation (ESGF) Data Node Software * * First Author: Gavin M. Bell (gavin@llnl.gov) * * * **************************************************************************** * * * Copyright (c) 2009, Lawrence Livermore National Security, LLC. * * Produced at the Lawrence Livermore National Laboratory * * Written by: Gavin M. Bell (gavin@llnl.gov) * * LLNL-CODE-420962 * * * * All rights reserved. This file is part of the: * * Earth System Grid Federation (ESGF) Data Node Software Stack * * * * For details, see http://esgf.org/esg-node/ * * Please also read this link * * http://esgf.org/LICENSE * * * * * Redistribution and use in source and binary forms, with or * * without modification, are permitted provided that the following * * conditions are met: * * * * * Redistributions of source code must retain the above copyright * * notice, this list of conditions and the disclaimer below. * * * * * Redistributions in binary form must reproduce the above copyright * * notice, this list of conditions and the disclaimer (as noted below) * * in the documentation and/or other materials provided with the * * distribution. * * * * Neither the name of the LLNS/LLNL nor the names of its contributors * * may be used to endorse or promote products derived from this * * software without specific prior written permission. * * * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL LAWRENCE * * LIVERMORE NATIONAL SECURITY, LLC, THE U.S. DEPARTMENT OF ENERGY OR * * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * * SUCH DAMAGE. * * * ***************************************************************************/ package esg.node.security.shell.cmds; /** Description: ESGF's "useradd" command. This command collects the attributes of a new user account from command-line options, asks the operator for confirmation, and adds the account to the ESGF user database. (Use "usermod" to modify an existing account.) 
**/
import esg.common.util.*;
import esg.common.shell.*;
import esg.common.shell.cmds.*;
import esg.node.security.*;

import org.apache.commons.cli.*;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.*;

/**
 * ESGF shell command "useradd".
 *
 * Gathers the attributes of a new user from command-line options, optionally
 * asks the operator for confirmation (suppressed by -n/--no-prompt), verifies
 * the caller's credentials and then inserts the new account through the
 * {@code UserInfoCredentialedDAO}.
 */
public class ESGFuseradd extends ESGFSecurityCommand {

    private static Log log = LogFactory.getLog(ESGFuseradd.class);

    public ESGFuseradd() { super(); }

    /** Fails fast if the current shell user lacks permission to run this command. */
    public void init(ESGFEnv env) { checkPermission(env); }

    public String getCommandName() { return "useradd"; }

    /**
     * Declares the options accepted by this command.
     * -fn, -ln and -e are marked required; see the note in doEval about the
     * interaction between required options and --help.
     */
    public void doInitOptions() {
        getOptions().addOption("n", "no-prompt", false, "do not ask for confirmation");

        Option firstname = OptionBuilder.withArgName("firstname")
            .hasArg(true)
            .withDescription("First name of user")
            .withLongOpt("firstname")
            .isRequired(true)
            .create("fn");
        getOptions().addOption(firstname);

        Option middlename = OptionBuilder.withArgName("middlename")
            .hasArg(true)
            .withDescription("Middle name of user")
            .withLongOpt("middlename")
            .create("mn");
        getOptions().addOption(middlename);

        Option lastname = OptionBuilder.withArgName("lastname")
            .hasArg(true)
            .withDescription("Last name of user")
            .withLongOpt("lastname")
            .isRequired(true)
            .create("ln");
        getOptions().addOption(lastname);

        Option email = OptionBuilder.withArgName("email")
            .hasArg(true)
            .withDescription("Email address of user")
            .withLongOpt("email")
            .isRequired(true)
            .create("e");
        getOptions().addOption(email);

        Option organization = OptionBuilder.withArgName("organization")
            .hasArg(true)
            .withDescription("Organization name of user")
            .withLongOpt("organization")
            .create("o");
        getOptions().addOption(organization);

        Option city = OptionBuilder.withArgName("city")
            .hasArg(true)
            .withDescription("City of user")
            .withLongOpt("city")
            .create("c");
        getOptions().addOption(city);

        //FIX: was hasArgs() (unbounded value list); state takes a single
        //value like every other option declared here.
        Option state = OptionBuilder.withArgName("state")
            .hasArg(true)
            .withDescription("State of user")
            .withLongOpt("state")
            .create("st");
        getOptions().addOption(state);

        //FIX: description previously said "First name of user" (copy/paste error).
        Option country = OptionBuilder.withArgName("country")
            .hasArg(true)
            .withDescription("Country of user")
            .withLongOpt("country")
            .create("cn");
        getOptions().addOption(country);

        Option openid = OptionBuilder.withArgName("openid")
            .hasArg(true)
            .withDescription("OpenID of user")
            .withLongOpt("openid")
            .create("oid");
        getOptions().addOption(openid);
    }

    /**
     * Executes the command: parses and echoes options, confirms with the
     * operator, checks credentials and creates the user record.
     *
     * @param line parsed command line
     * @param env  shell environment (reader/writer, DAO access)
     * @return the (unchanged) environment
     */
    public ESGFEnv doEval(CommandLine line, ESGFEnv env) {
        log.trace("inside the \"useradd\" command's doEval");
        //------------------
        //Collect args...
        //------------------
        //Scrubbing: drop empty tokens the cli parser may hand us.
        //(need to go into cli code and toss in some regex's to clean this up)
        java.util.List<String> argsList = new java.util.ArrayList<String>();
        for (String arg : line.getArgs()) {
            if (!arg.isEmpty()) { argsList.add(arg); }
        }
        String[] args = argsList.toArray(new String[]{});

        //The first (and only expected) positional argument is the username.
        String username = null;
        if (args.length > 0) {
            username = args[0];
            env.getWriter().println("user to create is: ["+username+"]");
            env.getWriter().flush();
        }
        else {
            throw new esg.common.ESGRuntimeException("You must provide the username for this account");
        }

        //Read each option value, echoing what was understood back to the operator.
        String firstname = null;
        if (line.hasOption( "fn" )) {
            firstname = line.getOptionValue( "fn" );
            env.getWriter().println("firstname: ["+firstname+"]");
        }
        String middlename = null;
        if (line.hasOption( "mn" )) {
            middlename = line.getOptionValue( "mn" );
            env.getWriter().println("middlename: ["+middlename+"]");
        }
        String lastname = null;
        if (line.hasOption( "ln" )) {
            lastname = line.getOptionValue( "ln" );
            env.getWriter().println("lastname: ["+lastname+"]");
        }
        String email = null;
        if (line.hasOption( "e" )) {
            email = line.getOptionValue( "e" );
            env.getWriter().println("email: ["+email+"]");
        }
        String organization = null;
        if (line.hasOption( "o" )) {
            organization = line.getOptionValue( "o" );
            env.getWriter().println("organization: ["+organization+"]");
        }
        String city = null;
        if (line.hasOption( "c" )) {
            city = line.getOptionValue( "c" );
            env.getWriter().println("city: ["+city+"]");
        }
        String state = null;
        if (line.hasOption( "st" )) {
            state = line.getOptionValue( "st" );
            env.getWriter().println("state: ["+state+"]");
        }
        String country = null;
        if (line.hasOption( "cn" )) {
            country = line.getOptionValue( "cn" );
            env.getWriter().println("country: ["+country+"]");
        }
        String openid = null;
        if (line.hasOption( "oid" )) {
            openid = line.getOptionValue( "oid" );
            env.getWriter().println("openid: ["+openid+"]");
        }

        //Don't burn cycles if don't need to...
        if (log.isInfoEnabled()) {
            int i=0;
            for (String arg : line.getArgs()) {
                log.info("arg("+(i++)+"): "+arg);
            }
        }

        //Check for required fields...
        //NOTE: this is a work around until I can figure out how to exempt --help.
        //You should be allowed to call --help regardless if 'required' fields are not present!
        //because then how will know what the fields are, etc. There may be some special handling
        //that I am not doing properly in the super class when assigning the help option (perhaps?). -gavin
        if ((firstname == null) || (lastname == null) || (email == null)) {
            env.getWriter().println("Missing Required Options: -fn, -ln, -e. Please see --help for more info");
            env.getWriter().flush();
            return env;
        }

        //------------------
        //NOW DO SOME LOGIC
        //------------------
        boolean noPrompt = line.hasOption( "n" );
        env.getWriter().flush();

        if (!noPrompt) {
            try {
                String answer = env.getReader().readLine("Is this information correct and ready to be submitted? [Y/n] > ");
                //Anything other than <enter> or y/Y aborts without touching the database.
                if (!answer.equals("") && !answer.equalsIgnoreCase("y")) { return env; }
            }
            catch(java.io.IOException e) {
                System.err.println(e.getMessage());
            }
        }

        //------------------
        //Check access privs and setup resource object
        //------------------
        UserInfoCredentialedDAO userDAO = null;
        if (!(userDAO = getUserDAO(env)).checkCredentials()) {
            userDAO = null;
            throw new ESGFCommandPermissionException("Credentials are not sufficient, sorry...");
        }
        //------------------

        //getUserById hands back a (possibly blank) record; null signals the
        //username itself was malformed.
        UserInfo user = null;
        if (username != null) user = userDAO.getUserById(username);
        if (null == user) {
            throw new esg.common.ESGRuntimeException("Sorry, your username ["+username+"] was not well formed");
        }

        if (user.isValid()) {
            //A valid record means the account already exists in the system.
            throw new esg.common.ESGRuntimeException("Apparently user ["+username+"] is already present in the system! (to modify run usermod)");
        }
        else {
            //Mandatory fields...
            user.setUserName(username);
            user.setFirstName(firstname);
            user.setLastName(lastname);
            user.setEmail(email);

            //Optional fields...
            if (null != middlename) user.setMiddleName(middlename);
            if (null != organization) user.setOrganization(organization);
            if (null != city) user.setCity(city);
            if (null != state) user.setState(state);
            if (null != country) user.setCountry(country);
            if (null != openid) user.setOpenid(openid);

            //Adding to database
            if (userDAO.addUserInfo(user)) {
                env.getWriter().println("[OK]");
            }
            else {
                env.getWriter().println("[FAILED]");
            }
            env.getWriter().println(user);

            //place holder for when doing group / role impl.
            //if (null != ) user.addPermission("CMIP5_test","admin");
            //if (null != ) user.addPermission("CMIP5_test","user_test");
            //if (null != ) user.addPermission("ARM_test","user_test");
        }
        env.getWriter().flush();
        //------------------
        return env;
    }
}
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.indexing.containers;

import org.jetbrains.annotations.NotNull;

import java.util.Arrays;
import java.util.function.IntPredicate;

/**
 * A sorted set of positive ints backed by a single sorted int array.
 *
 * Removal is lazy: a removed value is kept in place but negated (a
 * "tombstone"), so the array stays sorted by absolute value. Consequently:
 * - {@code mySetLength} is the number of occupied slots (live + tombstones);
 * - {@code mySize} is the number of live (positive) entries;
 * - lookups binary-search on {@code Math.abs} and then check the sign.
 * Tombstones are reclaimed by {@link #compact()} or reused by {@link #add}.
 *
 * NOTE(review): not thread-safe — no synchronization anywhere in the class;
 * callers are presumably expected to confine or lock externally.
 */
public final class SortedIdSet implements Cloneable, RandomAccessIntContainer {
  // Sorted by absolute value; positive = live, negative = tombstone.
  private int[] mySet;
  // Occupied prefix of mySet (live entries + tombstones).
  private int mySetLength;
  // Count of live (positive) entries only.
  private int mySize;

  public SortedIdSet(final int initialCapacity) {
    assert initialCapacity < Short.MAX_VALUE;
    mySet = new int[initialCapacity]; // todo slightly increase size
  }

  /** Adopts {@code array} directly (no copy); the first {@code size} slots must be sorted and positive. */
  public SortedIdSet(final int[] array, int size) {
    mySet = array;
    mySetLength = mySize = size;
  }

  /** True when there are no live entries (tombstones may still occupy slots). */
  public boolean isEmpty() {
    return mySize == 0;
  }

  @Override
  public int size() {
    return mySize;
  }

  /**
   * Inserts {@code value} (must be positive); returns false if already present.
   * Reuses a tombstone slot for the same value when one exists, otherwise
   * shifts the tail right (growing the array if full).
   */
  @Override
  public boolean add(int value) {
    assert value > 0;
    int pos;
    if (mySetLength == 0 || mySetLength > 0 && Math.abs(mySet[mySetLength - 1]) < value) {
      // Append case: encode the insertion point the same way binarySearch would.
      pos = -mySetLength-1; // most of the time during bulk indexing we add near the end
    }
    else {
      pos = binarySearch(mySet, 0, mySetLength, value);
    }
    if (pos >= 0) {
      if (mySet[pos] > 0) return false; // already live
      // Found a tombstone of this value: re-encode pos so the decode below
      // lands back on the same slot and simply overwrites it.
      pos = -pos - 1; // found removed
    }
    if (mySetLength == mySet.length) {
      // Double while small, then grow by 20% to bound waste on big sets.
      int nextArraySize = mySet.length < 1024 ? mySet.length << 1 : mySet.length + mySet.length / 5;
      mySet = Arrays.copyOf(mySet, nextArraySize);
    }
    pos = -pos - 1; // decode insertion point (or tombstone slot)
    boolean lengthIsIncreased = pos == mySetLength; // insert at end
    if (!lengthIsIncreased && Math.abs(mySet[pos]) != value) { // todo we can shift until first removed
      // True insertion in the middle: shift the tail right by one slot.
      System.arraycopy(mySet, pos, mySet, pos + 1, mySetLength - pos);
      lengthIsIncreased = true;
    }
    mySet[pos] = value;
    ++mySize;
    if (lengthIsIncreased) ++mySetLength;
    return true;
  }

  /**
   * Removes {@code value} by negating it in place (leaves a tombstone);
   * returns false if absent or already removed. O(log n) — no shifting.
   */
  @Override
  public boolean remove(int value) {
    assert value > 0;
    int pos = binarySearch(mySet, 0, mySetLength, value);
    if (pos < 0 || mySet[pos] < 0) return false;
    mySet[pos] = -value;
    //if (pos != mySetLength - 1) System.arraycopy(mySet, pos + 1, mySet, pos, mySetLength - pos - 1);
    --mySize;
    //--mySetLength;
    return true;
  }

  @Override
  public @NotNull IntIdsIterator intIterator() {
    return new Iterator();
  }

  /** Ascending iterator over live entries; skips tombstones via findNext(). */
  private final class Iterator implements IntIdsIterator {
    private int myCursor; // index of next live slot, or -1 when exhausted

    Iterator() {
      myCursor = findNext(0);
    }

    @Override
    public boolean hasNext() {
      return myCursor != -1;
    }

    @Override
    public int next() {
      int result = get(myCursor);
      myCursor = findNext(myCursor + 1);
      return result;
    }

    @Override
    public int size() {
      return SortedIdSet.this.size();
    }

    @Override
    public boolean hasAscendingOrder() {
      return true;
    }

    @Override
    public IntIdsIterator createCopyInInitialState() {
      return new Iterator();
    }
  }

  /**
   * Binary search over absolute values (so tombstones still participate in
   * ordering). Returns the index if found (live or tombstone — caller checks
   * sign), otherwise {@code -(insertionPoint) - 1}, same encoding as
   * {@link Arrays#binarySearch}.
   */
  private static int binarySearch(final int[] set, int startOffset, int endOffset, int key) {
    int low = startOffset;
    int high = endOffset - 1;
    while (low <= high) {
      // + binds tighter than >>>, so this is the overflow-safe midpoint (low+high)>>>1.
      int mid = low + high >>> 1;
      int midVal = Math.abs(set[mid]);
      if (midVal < key) low = mid + 1;
      else if (midVal > key) high = mid - 1;
      else return mid; // key found
    }
    return -(low + 1); // key not found.
  }

  /** Applies {@code procedure} to live entries in ascending order; stops early on false. */
  public void forEach(IntPredicate procedure) {
    for (int i = 0; i < mySetLength; ++i) {
      int value = mySet[i];
      if (value > 0 && !procedure.test(value)) {
        break;
      }
    }
  }

  @Override
  public boolean contains(int value) {
    if (value <= 0) {
      return false;
    }
    int pos = binarySearch(mySet, 0, mySetLength, value);
    // A hit on a negated slot is a tombstone, i.e. not contained.
    return pos >= 0 && mySet[pos] > 0;
  }

  @Override
  public Object clone() {
    try {
      SortedIdSet set = (SortedIdSet)super.clone();
      set.mySet = mySet.clone(); // deep-copy the backing array
      return set;
    } catch (CloneNotSupportedException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Squeezes out tombstones in place (single left-to-right pass), but only
   * when at least half of the occupied slots are tombstones and the set is
   * non-trivial — keeps the amortized cost low.
   */
  @Override
  public void compact() {
    if(2 * mySize < mySetLength && mySetLength > 5) {
      int positivePosition = -1; // index of last live entry written
      for(int i = 0; i < mySetLength; ++i) {
        if (mySet[i] < 0) {
          // Skip a run of tombstones, then copy the next live entry down.
          while(i < mySetLength && mySet[i] < 0) ++i;
          if (i == mySetLength) {
            break;
          } else {
            mySet[++positivePosition] = mySet[i];
          }
        } else {
          ++positivePosition;
          if (i != positivePosition) mySet[positivePosition] = mySet[i];
        }
      }
      // todo slightly decrease size
      mySetLength = (short)(positivePosition + 1);
    }
  }

  /**
   * Ensures room for {@code count} more entries. May return a different
   * container ({@link IdBitSet}) when the projected size exceeds the array
   * representation's limit — callers must use the returned instance.
   */
  @Override
  public @NotNull RandomAccessIntContainer ensureContainerCapacity(int count) {
    int newSize = mySetLength + count;
    if (newSize < mySet.length) return this;
    if (newSize > ChangeBufferingList.MAX_FILES) {
      return new IdBitSet(this, count);
    }
    newSize = ChangeBufferingList.calcNextArraySize(mySet.length, newSize);
    assert newSize < Short.MAX_VALUE;
    int[] newSet = new int[newSize]; // todo slightly increase size and compact
    System.arraycopy(mySet, 0, newSet, 0, mySetLength);
    mySet = newSet;
    return this;
  }

  /** Returns the index of the first live slot at or after {@code i}, or -1. */
  public int findNext(int i) {
    while(i < mySetLength) {
      if (mySet[i] > 0) return i;
      ++i;
    }
    return -1;
  }

  /** Returns the live value at {@code cursor}; asserts the slot is not a tombstone. */
  public int get(int cursor) {
    assert cursor < mySetLength;
    int value = mySet[cursor];
    assert value > 0;
    return value;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.query; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.UUID; import javax.cache.Cache; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteBinary; import org.apache.ignite.IgniteCache; import org.apache.ignite.IgniteDataStreamer; import org.apache.ignite.IgniteException; import org.apache.ignite.binary.BinaryObject; import org.apache.ignite.cache.affinity.AffinityFunction; import org.apache.ignite.cache.affinity.AffinityFunctionContext; import org.apache.ignite.cache.query.QueryCursor; import org.apache.ignite.cache.query.ScanQuery; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.cluster.ClusterState; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.DataRegionConfiguration; import org.apache.ignite.configuration.DataStorageConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.failure.StopNodeOrHaltFailureHandler; import org.apache.ignite.internal.IgniteEx; import org.apache.ignite.internal.cluster.NodeOrderComparator; import 
org.apache.ignite.lang.IgniteBiPredicate;
import org.apache.ignite.lang.IgniteClosure;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.junit.Test;

import static org.apache.ignite.cache.CacheMode.LOCAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtPartitionState.EVICTED;

/**
 * ScanQuery failover test. Tests scenario where user supplied closures throw unhandled errors.
 */
public class CacheScanQueryFailoverTest extends GridCommonAbstractTest {
    /** Name of the LOCAL-mode cache used by the local-cache scenario. */
    private static final String LOCAL_CACHE_NAME = "local";

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        super.beforeTest();

        stopAllGrids();
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        stopAllGrids();

        super.afterTest();
    }

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String name) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(name);

        cfg.setConsistentId(name);
        // Halt-on-failure handler: if a query closure Error were treated as a
        // node failure, the node would stop and the topology asserts below would fail.
        cfg.setFailureHandler(new StopNodeOrHaltFailureHandler());

        return cfg;
    }

    /**
     * Checks that an Error thrown from a user filter/transformer propagates to
     * the querying node instead of killing any server node.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testScanQueryWithFailedClosures() throws Exception {
        Ignite srv = startGridsMultiThreaded(4);
        Ignite client = startClientGrid("client");

        CacheConfiguration cfg = new CacheConfiguration(DEFAULT_CACHE_NAME).setCacheMode(PARTITIONED);

        // Test query from client node.
        queryCachesWithFailedPredicates(client, cfg);

        // Test query from server node.
        queryCachesWithFailedPredicates(srv, cfg);

        // All 4 servers plus the client must have survived the thrown Errors.
        assertEquals(client.cluster().nodes().size(), 5);
    }

    /**
     * Same as above but for a LOCAL-mode cache queried on a server node.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testScanQueryOverLocalCacheWithFailedClosures() throws Exception {
        Ignite srv = startGridsMultiThreaded(4);

        queryCachesWithFailedPredicates(srv, new CacheConfiguration(LOCAL_CACHE_NAME).setCacheMode(LOCAL));

        // All 4 server nodes must still be alive.
        assertEquals(srv.cluster().nodes().size(), 4);
    }

    /**
     * Test scan query when partitions are concurrently evicting.
     */
    @Test
    public void testScanQueryOnEvictedPartition() throws Exception {
        cleanPersistenceDir();

        // Persistence is required so evicted partition stores can be destroyed on checkpoint.
        DataStorageConfiguration dsCfg = new DataStorageConfiguration().setDefaultDataRegionConfiguration(
            new DataRegionConfiguration().setPersistenceEnabled(true));

        IgniteEx grid0 = startGrid(getConfiguration("grid0").setDataStorageConfiguration(dsCfg));

        grid0.cluster().state(ClusterState.ACTIVE);

        // Two partitions per cache, assigned round-robin, so partition 1 moves
        // to the second node when it joins the baseline.
        IgniteCache<Integer, Integer> cache1 = grid0.getOrCreateCache(
            new CacheConfiguration<Integer, Integer>("cache1")
                .setAffinity(new RoundRobinAffinityFunction(2))
        );

        IgniteCache<Integer, Integer> cache2 = grid0.getOrCreateCache(
            new CacheConfiguration<Integer, Integer>("cache2")
                .setAffinity(new RoundRobinAffinityFunction(2))
        );

        cache1.put(0, 0); // Put to partition 0.
        cache1.put(1, 1); // Put to partition 1.

        cache2.put(0, 0); // Put to partition 0.

        for (int i = 1; i < 1_000; i += 2)
            cache2.put(i, i); // Put to partition 1.

        // Page size 1 keeps the cursors lazy: they hold a position inside a
        // partition while eviction happens underneath.
        Iterator iter1 = cache1.query(new ScanQuery<>().setPageSize(1)).iterator();
        Iterator iter2 = cache1.query(new ScanQuery<>().setPageSize(1)).iterator();

        // Iter 1 check case, when cursor is switched to evicted partition.
        iter1.next();

        // Iter 2 check case, when cursor already moving by partition and this partition is evicted.
        iter2.next();
        iter2.next();

        startGrid(getConfiguration("grid1").setDataStorageConfiguration(dsCfg));

        grid0.cluster().setBaselineTopology(grid0.cluster().topologyVersion());

        // Wait for rebalance and eviction of partition 1 to grid 1 for each cache.
        awaitPartitionMapExchange();

        assertTrue(GridTestUtils.waitForCondition(() -> grid0.cachex("cache1").context().topology().localPartition(1).state() == EVICTED &&
            grid0.cachex("cache2").context().topology().localPartition(1).state() == EVICTED, 1_000L));

        // Force checkpoint to destroy evicted partitions store.
        forceCheckpoint(grid0);

        // Both cursors must now fail with a descriptive exception rather than
        // hanging or returning stale data.
        GridTestUtils.assertThrowsAnyCause(log, iter1::next, IgniteException.class, "Failed to get next data row");

        GridTestUtils.assertThrowsAnyCause(log, () -> {
            while (iter2.hasNext())
                iter2.next();

            return null;
        }, IgniteException.class, "Failed to get next data row");
    }

    /**
     * Runs the poison-pill filter and transformer queries against each cache
     * and asserts the Error reaches the caller.
     *
     * @param ignite Ignite instance.
     * @param configs Cache configurations.
     */
    private void queryCachesWithFailedPredicates(Ignite ignite, CacheConfiguration... configs) {
        if (configs == null)
            return;

        for (CacheConfiguration cfg: configs) {
            IgniteCache cache = ignite.getOrCreateCache(cfg);

            populateCache(ignite, cache.getName());

            // Check that exception propagates to client from filter failure.
            GridTestUtils.assertThrowsAnyCause(log, () -> {
                try (QueryCursor<Cache.Entry<Integer, BinaryObject>> cursor =
                         cache.withKeepBinary().query(new ScanQuery<>(filter))) {
                    for (Cache.Entry<Integer, BinaryObject> entry : cursor)
                        log.info("Entry " + entry.toString());
                }

                return null;
            }, Error.class, "Poison pill");

            // Check that exception propagates to client from transformer failure.
            GridTestUtils.assertThrowsAnyCause(log, () -> {
                try (QueryCursor<Cache.Entry<Integer, BinaryObject>> cursor =
                         cache.withKeepBinary().query(new ScanQuery<>(), transformer)) {
                    for (Cache.Entry<Integer, BinaryObject> entry : cursor)
                        log.info("Entry " + entry.toString());
                }

                return null;
            }, Error.class, "Poison pill");
        }
    }

    /**
     * Streams 1000 binary-object entries into the named cache.
     *
     * @param ignite Ignite instance.
     * @param cacheName Cache name.
     */
    private void populateCache(Ignite ignite, String cacheName) {
        IgniteBinary binary = ignite.binary();

        try (IgniteDataStreamer<Object, Object> streamer = ignite.dataStreamer(cacheName)) {
            for (int i = 0; i < 1_000; i++)
                streamer.addData(i, binary.builder("type_name").setField("f_" + i, "v_" + i).build());
        }
    }

    /** Failed filter: throws on every entry it is asked to evaluate. */
    private static IgniteBiPredicate<Integer, BinaryObject> filter = (key, value) -> {
        throw new Error("Poison pill");
    };

    /** Failed entry transformer: throws on every entry it is asked to transform. */
    private static IgniteClosure<Cache.Entry<Integer, BinaryObject>, Cache.Entry<Integer, BinaryObject>> transformer =
        integerBinaryObjectEntry -> {
            throw new Error("Poison pill");
        };

    /**
     * Affinity function to distribute partitions by round robin to each node.
     */
    private static class RoundRobinAffinityFunction implements AffinityFunction {
        /** Partitions count. */
        private final int partitions;

        /**
         * @param partitions Partitions count.
         */
        public RoundRobinAffinityFunction(int partitions) {
            this.partitions = partitions;
        }

        /** {@inheritDoc} */
        @Override public void reset() {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public int partitions() {
            return partitions;
        }

        /** {@inheritDoc} */
        @Override public int partition(Object key) {
            return key.hashCode() % partitions;
        }

        /** {@inheritDoc} */
        @Override public List<List<ClusterNode>> assignPartitions(AffinityFunctionContext affCtx) {
            List<List<ClusterNode>> res = new ArrayList<>(partitions);

            // Deterministic order so partition ownership is stable across calls.
            List<ClusterNode> nodes = affCtx.currentTopologySnapshot();

            nodes.sort(NodeOrderComparator.getInstance());

            for (int i = 0; i < partitions; i++)
                res.add(Collections.singletonList(nodes.get(i % nodes.size())));

            return res;
        }

        /** {@inheritDoc} */
        @Override public void removeNode(UUID nodeId) {
            // No-op.
        }
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.cluster.service; import com.google.common.collect.Iterables; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchIllegalStateException; import org.elasticsearch.Version; import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.ClusterState.Builder; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeService; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.operation.OperationRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.text.StringText; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.*; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoveryService; import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.util.*; import java.util.concurrent.*; import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory; /** * */ public class InternalClusterService extends AbstractLifecycleComponent<ClusterService> implements ClusterService { public static final String UPDATE_THREAD_NAME = "clusterService#updateTask"; private final ThreadPool threadPool; private final DiscoveryService discoveryService; private final OperationRouting operationRouting; private final TransportService transportService; private final NodeSettingsService nodeSettingsService; private final DiscoveryNodeService discoveryNodeService; private final Version version; private final TimeValue reconnectInterval; private volatile PrioritizedEsThreadPoolExecutor updateTasksExecutor; /** * Those 3 state listeners are changing infrequently - CopyOnWriteArrayList is just fine */ private final Collection<ClusterStateListener> priorityClusterStateListeners = new CopyOnWriteArrayList<>(); private final Collection<ClusterStateListener> clusterStateListeners = new CopyOnWriteArrayList<>(); private final Collection<ClusterStateListener> lastClusterStateListeners = new CopyOnWriteArrayList<>(); // TODO this is rather frequently changing I guess a Synced Set would be better here and a dedicated remove API private final Collection<ClusterStateListener> postAppliedListeners = new CopyOnWriteArrayList<>(); private final Iterable<ClusterStateListener> preAppliedListeners = Iterables.concat( priorityClusterStateListeners, clusterStateListeners, lastClusterStateListeners); private final 
LocalNodeMasterListeners localNodeMasterListeners; private final Queue<NotifyTimeout> onGoingTimeouts = ConcurrentCollections.newQueue(); private volatile ClusterState clusterState; private final ClusterBlocks.Builder initialBlocks; private volatile ScheduledFuture reconnectToNodes; @Inject public InternalClusterService(Settings settings, DiscoveryService discoveryService, OperationRouting operationRouting, TransportService transportService, NodeSettingsService nodeSettingsService, ThreadPool threadPool, ClusterName clusterName, DiscoveryNodeService discoveryNodeService, Version version) { super(settings); this.operationRouting = operationRouting; this.transportService = transportService; this.discoveryService = discoveryService; this.threadPool = threadPool; this.nodeSettingsService = nodeSettingsService; this.discoveryNodeService = discoveryNodeService; this.version = version; // will be replaced on doStart. this.clusterState = ClusterState.builder(clusterName).build(); this.nodeSettingsService.setClusterService(this); this.reconnectInterval = componentSettings.getAsTime("reconnect_interval", TimeValue.timeValueSeconds(10)); localNodeMasterListeners = new LocalNodeMasterListeners(threadPool); initialBlocks = ClusterBlocks.builder().addGlobalBlock(discoveryService.getNoMasterBlock()); } public NodeSettingsService settingsService() { return this.nodeSettingsService; } public void addInitialStateBlock(ClusterBlock block) throws ElasticsearchIllegalStateException { if (lifecycle.started()) { throw new ElasticsearchIllegalStateException("can't set initial block when started"); } initialBlocks.addGlobalBlock(block); } @Override public void removeInitialStateBlock(ClusterBlock block) throws ElasticsearchIllegalStateException { if (lifecycle.started()) { throw new ElasticsearchIllegalStateException("can't set initial block when started"); } initialBlocks.removeGlobalBlock(block); } @Override protected void doStart() throws ElasticsearchException { 
add(localNodeMasterListeners); this.clusterState = ClusterState.builder(clusterState).blocks(initialBlocks).build(); this.updateTasksExecutor = EsExecutors.newSinglePrioritizing(daemonThreadFactory(settings, UPDATE_THREAD_NAME)); this.reconnectToNodes = threadPool.schedule(reconnectInterval, ThreadPool.Names.GENERIC, new ReconnectToNodes()); Map<String, String> nodeAttributes = discoveryNodeService.buildAttributes(); // note, we rely on the fact that its a new id each time we start, see FD and "kill -9" handling final String nodeId = DiscoveryService.generateNodeId(settings); DiscoveryNode localNode = new DiscoveryNode(settings.get("name"), nodeId, transportService.boundAddress().publishAddress(), nodeAttributes, version); DiscoveryNodes.Builder nodeBuilder = DiscoveryNodes.builder().put(localNode).localNodeId(localNode.id()); this.clusterState = ClusterState.builder(clusterState).nodes(nodeBuilder).blocks(initialBlocks).build(); } @Override protected void doStop() throws ElasticsearchException { FutureUtils.cancel(this.reconnectToNodes); for (NotifyTimeout onGoingTimeout : onGoingTimeouts) { onGoingTimeout.cancel(); onGoingTimeout.listener.onClose(); } updateTasksExecutor.shutdown(); try { updateTasksExecutor.awaitTermination(10, TimeUnit.SECONDS); } catch (InterruptedException e) { // ignore } remove(localNodeMasterListeners); } @Override protected void doClose() throws ElasticsearchException { } @Override public DiscoveryNode localNode() { return clusterState.getNodes().localNode(); } @Override public OperationRouting operationRouting() { return operationRouting; } public ClusterState state() { return this.clusterState; } public void addFirst(ClusterStateListener listener) { priorityClusterStateListeners.add(listener); } public void addLast(ClusterStateListener listener) { lastClusterStateListeners.add(listener); } public void add(ClusterStateListener listener) { clusterStateListeners.add(listener); } public void remove(ClusterStateListener listener) { 
clusterStateListeners.remove(listener); priorityClusterStateListeners.remove(listener); lastClusterStateListeners.remove(listener); postAppliedListeners.remove(listener); for (Iterator<NotifyTimeout> it = onGoingTimeouts.iterator(); it.hasNext(); ) { NotifyTimeout timeout = it.next(); if (timeout.listener.equals(listener)) { timeout.cancel(); it.remove(); } } } @Override public void add(LocalNodeMasterListener listener) { localNodeMasterListeners.add(listener); } @Override public void remove(LocalNodeMasterListener listener) { localNodeMasterListeners.remove(listener); } public void add(final TimeValue timeout, final TimeoutClusterStateListener listener) { if (lifecycle.stoppedOrClosed()) { listener.onClose(); return; } // call the post added notification on the same event thread try { updateTasksExecutor.execute(new PrioritizedRunnable(Priority.HIGH) { @Override public void run() { NotifyTimeout notifyTimeout = new NotifyTimeout(listener, timeout); notifyTimeout.future = threadPool.schedule(timeout, ThreadPool.Names.GENERIC, notifyTimeout); onGoingTimeouts.add(notifyTimeout); postAppliedListeners.add(listener); listener.postAdded(); } }); } catch (EsRejectedExecutionException e) { if (lifecycle.stoppedOrClosed()) { listener.onClose(); } else { throw e; } } } public void submitStateUpdateTask(final String source, final ClusterStateUpdateTask updateTask) { submitStateUpdateTask(source, Priority.NORMAL, updateTask); } public void submitStateUpdateTask(final String source, Priority priority, final ClusterStateUpdateTask updateTask) { if (!lifecycle.started()) { return; } try { final UpdateTask task = new UpdateTask(source, priority, updateTask); if (updateTask instanceof TimeoutClusterStateUpdateTask) { final TimeoutClusterStateUpdateTask timeoutUpdateTask = (TimeoutClusterStateUpdateTask) updateTask; updateTasksExecutor.execute(task, threadPool.scheduler(), timeoutUpdateTask.timeout(), new Runnable() { @Override public void run() { threadPool.generic().execute(new 
Runnable() { @Override public void run() { timeoutUpdateTask.onFailure(task.source, new ProcessClusterEventTimeoutException(timeoutUpdateTask.timeout(), task.source)); } }); } }); } else { updateTasksExecutor.execute(task); } } catch (EsRejectedExecutionException e) { // ignore cases where we are shutting down..., there is really nothing interesting // to be done here... if (!lifecycle.stoppedOrClosed()) { throw e; } } } @Override public List<PendingClusterTask> pendingTasks() { long now = System.currentTimeMillis(); PrioritizedEsThreadPoolExecutor.Pending[] pendings = updateTasksExecutor.getPending(); List<PendingClusterTask> pendingClusterTasks = new ArrayList<>(pendings.length); for (PrioritizedEsThreadPoolExecutor.Pending pending : pendings) { final String source; final long timeInQueue; if (pending.task instanceof UpdateTask) { UpdateTask updateTask = (UpdateTask) pending.task; source = updateTask.source; timeInQueue = now - updateTask.addedAt; } else { source = "unknown"; timeInQueue = -1; } pendingClusterTasks.add(new PendingClusterTask(pending.insertionOrder, pending.priority, new StringText(source), timeInQueue, pending.executing)); } return pendingClusterTasks; } class UpdateTask extends PrioritizedRunnable { public final String source; public final ClusterStateUpdateTask updateTask; public final long addedAt = System.currentTimeMillis(); UpdateTask(String source, Priority priority, ClusterStateUpdateTask updateTask) { super(priority); this.source = source; this.updateTask = updateTask; } @Override public void run() { if (!lifecycle.started()) { logger.debug("processing [{}]: ignoring, cluster_service not started", source); return; } logger.debug("processing [{}]: execute", source); ClusterState previousClusterState = clusterState; if (!previousClusterState.nodes().localNodeMaster() && updateTask.runOnlyOnMaster()) { logger.debug("failing [{}]: local node is no longer master", source); updateTask.onNoLongerMaster(source); return; } ClusterState 
newClusterState; try { newClusterState = updateTask.execute(previousClusterState); } catch (Throwable e) { if (logger.isTraceEnabled()) { StringBuilder sb = new StringBuilder("failed to execute cluster state update, state:\nversion [").append(previousClusterState.version()).append("], source [").append(source).append("]\n"); sb.append(previousClusterState.nodes().prettyPrint()); sb.append(previousClusterState.routingTable().prettyPrint()); sb.append(previousClusterState.readOnlyRoutingNodes().prettyPrint()); logger.trace(sb.toString(), e); } updateTask.onFailure(source, e); return; } if (previousClusterState == newClusterState) { logger.debug("processing [{}]: no change in cluster_state", source); if (updateTask instanceof AckedClusterStateUpdateTask) { //no need to wait for ack if nothing changed, the update can be counted as acknowledged ((AckedClusterStateUpdateTask) updateTask).onAllNodesAcked(null); } if (updateTask instanceof ProcessedClusterStateUpdateTask) { ((ProcessedClusterStateUpdateTask) updateTask).clusterStateProcessed(source, previousClusterState, newClusterState); } return; } try { Discovery.AckListener ackListener = new NoOpAckListener(); if (newClusterState.nodes().localNodeMaster()) { // only the master controls the version numbers Builder builder = ClusterState.builder(newClusterState).version(newClusterState.version() + 1); if (previousClusterState.routingTable() != newClusterState.routingTable()) { builder.routingTable(RoutingTable.builder(newClusterState.routingTable()).version(newClusterState.routingTable().version() + 1)); } if (previousClusterState.metaData() != newClusterState.metaData()) { builder.metaData(MetaData.builder(newClusterState.metaData()).version(newClusterState.metaData().version() + 1)); } newClusterState = builder.build(); if (updateTask instanceof AckedClusterStateUpdateTask) { final AckedClusterStateUpdateTask ackedUpdateTask = (AckedClusterStateUpdateTask) updateTask; if (ackedUpdateTask.ackTimeout() == null || 
ackedUpdateTask.ackTimeout().millis() == 0) { ackedUpdateTask.onAckTimeout(); } else { try { ackListener = new AckCountDownListener(ackedUpdateTask, newClusterState.version(), newClusterState.nodes(), threadPool); } catch (EsRejectedExecutionException ex) { if (logger.isDebugEnabled()) { logger.debug("Couldn't schedule timeout thread - node might be shutting down", ex); } //timeout straightaway, otherwise we could wait forever as the timeout thread has not started ackedUpdateTask.onAckTimeout(); } } } } newClusterState.status(ClusterState.ClusterStateStatus.BEING_APPLIED); if (logger.isTraceEnabled()) { StringBuilder sb = new StringBuilder("cluster state updated, source [").append(source).append("]\n"); sb.append(newClusterState.prettyPrint()); logger.trace(sb.toString()); } else if (logger.isDebugEnabled()) { logger.debug("cluster state updated, version [{}], source [{}]", newClusterState.version(), source); } ClusterChangedEvent clusterChangedEvent = new ClusterChangedEvent(source, newClusterState, previousClusterState); // new cluster state, notify all listeners final DiscoveryNodes.Delta nodesDelta = clusterChangedEvent.nodesDelta(); if (nodesDelta.hasChanges() && logger.isInfoEnabled()) { String summary = nodesDelta.shortSummary(); if (summary.length() > 0) { logger.info("{}, reason: {}", summary, source); } } // TODO, do this in parallel (and wait) for (DiscoveryNode node : nodesDelta.addedNodes()) { if (!nodeRequiresConnection(node)) { continue; } try { transportService.connectToNode(node); } catch (Throwable e) { // the fault detection will detect it as failed as well logger.warn("failed to connect to node [" + node + "]", e); } } // if we are the master, publish the new state to all nodes // we publish here before we send a notification to all the listeners, since if it fails // we don't want to notify if (newClusterState.nodes().localNodeMaster()) { logger.debug("publishing cluster state version {}", newClusterState.version()); 
discoveryService.publish(newClusterState, ackListener); } // update the current cluster state clusterState = newClusterState; logger.debug("set local cluster state to version {}", newClusterState.version()); for (ClusterStateListener listener : preAppliedListeners) { try { listener.clusterChanged(clusterChangedEvent); } catch (Exception ex) { logger.warn("failed to notify ClusterStateListener", ex); } } for (DiscoveryNode node : nodesDelta.removedNodes()) { try { transportService.disconnectFromNode(node); } catch (Throwable e) { logger.warn("failed to disconnect to node [" + node + "]", e); } } newClusterState.status(ClusterState.ClusterStateStatus.APPLIED); for (ClusterStateListener listener : postAppliedListeners) { try { listener.clusterChanged(clusterChangedEvent); } catch (Exception ex) { logger.warn("failed to notify ClusterStateListener", ex); } } //manual ack only from the master at the end of the publish if (newClusterState.nodes().localNodeMaster()) { try { ackListener.onNodeAck(newClusterState.nodes().localNode(), null); } catch (Throwable t) { logger.debug("error while processing ack for master node [{}]", t, newClusterState.nodes().localNode()); } } if (updateTask instanceof ProcessedClusterStateUpdateTask) { ((ProcessedClusterStateUpdateTask) updateTask).clusterStateProcessed(source, previousClusterState, newClusterState); } logger.debug("processing [{}]: done applying updated cluster_state (version: {})", source, newClusterState.version()); } catch (Throwable t) { StringBuilder sb = new StringBuilder("failed to apply updated cluster state:\nversion [").append(newClusterState.version()).append("], source [").append(source).append("]\n"); sb.append(newClusterState.nodes().prettyPrint()); sb.append(newClusterState.routingTable().prettyPrint()); sb.append(newClusterState.readOnlyRoutingNodes().prettyPrint()); logger.warn(sb.toString(), t); // TODO: do we want to call updateTask.onFailure here? 
} } } class NotifyTimeout implements Runnable { final TimeoutClusterStateListener listener; final TimeValue timeout; ScheduledFuture future; NotifyTimeout(TimeoutClusterStateListener listener, TimeValue timeout) { this.listener = listener; this.timeout = timeout; } public void cancel() { FutureUtils.cancel(future); } @Override public void run() { if (future.isCancelled()) { return; } if (lifecycle.stoppedOrClosed()) { listener.onClose(); } else { listener.onTimeout(this.timeout); } // note, we rely on the listener to remove itself in case of timeout if needed } } private class ReconnectToNodes implements Runnable { private ConcurrentMap<DiscoveryNode, Integer> failureCount = ConcurrentCollections.newConcurrentMap(); @Override public void run() { // master node will check against all nodes if its alive with certain discoveries implementations, // but we can't rely on that, so we check on it as well for (DiscoveryNode node : clusterState.nodes()) { if (lifecycle.stoppedOrClosed()) { return; } if (!nodeRequiresConnection(node)) { continue; } if (clusterState.nodes().nodeExists(node.id())) { // we double check existence of node since connectToNode might take time... if (!transportService.nodeConnected(node)) { try { transportService.connectToNode(node); } catch (Exception e) { if (lifecycle.stoppedOrClosed()) { return; } if (clusterState.nodes().nodeExists(node.id())) { // double check here as well, maybe its gone? Integer nodeFailureCount = failureCount.get(node); if (nodeFailureCount == null) { nodeFailureCount = 1; } else { nodeFailureCount = nodeFailureCount + 1; } // log every 6th failure if ((nodeFailureCount % 6) == 0) { // reset the failure count... 
nodeFailureCount = 0; logger.warn("failed to reconnect to node {}", e, node); } failureCount.put(node, nodeFailureCount); } } } } } // go over and remove failed nodes that have been removed DiscoveryNodes nodes = clusterState.nodes(); for (Iterator<DiscoveryNode> failedNodesIt = failureCount.keySet().iterator(); failedNodesIt.hasNext(); ) { DiscoveryNode failedNode = failedNodesIt.next(); if (!nodes.nodeExists(failedNode.id())) { failedNodesIt.remove(); } } if (lifecycle.started()) { reconnectToNodes = threadPool.schedule(reconnectInterval, ThreadPool.Names.GENERIC, this); } } } private boolean nodeRequiresConnection(DiscoveryNode node) { return localNode().shouldConnectTo(node); } private static class LocalNodeMasterListeners implements ClusterStateListener { private final List<LocalNodeMasterListener> listeners = new CopyOnWriteArrayList<>(); private final ThreadPool threadPool; private volatile boolean master = false; private LocalNodeMasterListeners(ThreadPool threadPool) { this.threadPool = threadPool; } @Override public void clusterChanged(ClusterChangedEvent event) { if (!master && event.localNodeMaster()) { master = true; for (LocalNodeMasterListener listener : listeners) { Executor executor = threadPool.executor(listener.executorName()); executor.execute(new OnMasterRunnable(listener)); } return; } if (master && !event.localNodeMaster()) { master = false; for (LocalNodeMasterListener listener : listeners) { Executor executor = threadPool.executor(listener.executorName()); executor.execute(new OffMasterRunnable(listener)); } } } private void add(LocalNodeMasterListener listener) { listeners.add(listener); } private void remove(LocalNodeMasterListener listener) { listeners.remove(listener); } private void clear() { listeners.clear(); } } private static class OnMasterRunnable implements Runnable { private final LocalNodeMasterListener listener; private OnMasterRunnable(LocalNodeMasterListener listener) { this.listener = listener; } @Override public void run() 
{ listener.onMaster(); } } private static class OffMasterRunnable implements Runnable { private final LocalNodeMasterListener listener; private OffMasterRunnable(LocalNodeMasterListener listener) { this.listener = listener; } @Override public void run() { listener.offMaster(); } } private static class NoOpAckListener implements Discovery.AckListener { @Override public void onNodeAck(DiscoveryNode node, @Nullable Throwable t) { } @Override public void onTimeout() { } } private static class AckCountDownListener implements Discovery.AckListener { private static final ESLogger logger = Loggers.getLogger(AckCountDownListener.class); private final AckedClusterStateUpdateTask ackedUpdateTask; private final CountDown countDown; private final DiscoveryNodes nodes; private final long clusterStateVersion; private final Future<?> ackTimeoutCallback; private Throwable lastFailure; AckCountDownListener(AckedClusterStateUpdateTask ackedUpdateTask, long clusterStateVersion, DiscoveryNodes nodes, ThreadPool threadPool) { this.ackedUpdateTask = ackedUpdateTask; this.clusterStateVersion = clusterStateVersion; this.nodes = nodes; int countDown = 0; for (DiscoveryNode node : nodes) { if (ackedUpdateTask.mustAck(node)) { countDown++; } } //we always wait for at least 1 node (the master) countDown = Math.max(1, countDown); logger.trace("expecting {} acknowledgements for cluster_state update (version: {})", countDown, clusterStateVersion); this.countDown = new CountDown(countDown); this.ackTimeoutCallback = threadPool.schedule(ackedUpdateTask.ackTimeout(), ThreadPool.Names.GENERIC, new Runnable() { @Override public void run() { onTimeout(); } }); } @Override public void onNodeAck(DiscoveryNode node, @Nullable Throwable t) { if (!ackedUpdateTask.mustAck(node)) { //we always wait for the master ack anyway if (!node.equals(nodes.masterNode())) { return; } } if (t == null) { logger.trace("ack received from node [{}], cluster_state update (version: {})", node, clusterStateVersion); } else { 
this.lastFailure = t; logger.debug("ack received from node [{}], cluster_state update (version: {})", t, node, clusterStateVersion); } if (countDown.countDown()) { logger.trace("all expected nodes acknowledged cluster_state update (version: {})", clusterStateVersion); FutureUtils.cancel(ackTimeoutCallback); ackedUpdateTask.onAllNodesAcked(lastFailure); } } @Override public void onTimeout() { if (countDown.fastForward()) { logger.trace("timeout waiting for acknowledgement for cluster_state update (version: {})", clusterStateVersion); ackedUpdateTask.onAckTimeout(); } } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.taskexecutor; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.JobID; import org.apache.flink.api.common.time.Time; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.MemorySize; import org.apache.flink.configuration.NettyShuffleEnvironmentOptions; import org.apache.flink.configuration.TaskManagerOptions; import org.apache.flink.runtime.blob.PermanentBlobKey; import org.apache.flink.runtime.clusterframework.types.AllocationID; import org.apache.flink.runtime.clusterframework.types.ResourceID; import org.apache.flink.runtime.concurrent.FutureUtils; import org.apache.flink.runtime.deployment.InputGateDeploymentDescriptor; import org.apache.flink.runtime.deployment.ResultPartitionDeploymentDescriptor; import org.apache.flink.runtime.deployment.TaskDeploymentDescriptor; import org.apache.flink.runtime.execution.Environment; import org.apache.flink.runtime.execution.ExecutionState; import org.apache.flink.runtime.executiongraph.ExecutionAttemptID; import org.apache.flink.runtime.executiongraph.ExecutionGraphException; import org.apache.flink.runtime.executiongraph.JobInformation; import 
org.apache.flink.runtime.executiongraph.PartitionInfo; import org.apache.flink.runtime.executiongraph.TaskInformation; import org.apache.flink.runtime.io.network.partition.PartitionNotFoundException; import org.apache.flink.runtime.io.network.partition.ResultPartitionType; import org.apache.flink.runtime.jobgraph.IntermediateDataSetID; import org.apache.flink.runtime.jobgraph.IntermediateResultPartitionID; import org.apache.flink.runtime.jobgraph.JobVertexID; import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable; import org.apache.flink.runtime.jobmaster.JobMasterId; import org.apache.flink.runtime.jobmaster.TestingAbstractInvokables; import org.apache.flink.runtime.jobmaster.utils.TestingJobMasterGateway; import org.apache.flink.runtime.jobmaster.utils.TestingJobMasterGatewayBuilder; import org.apache.flink.runtime.messages.Acknowledge; import org.apache.flink.runtime.shuffle.NettyShuffleDescriptor; import org.apache.flink.runtime.shuffle.PartitionDescriptor; import org.apache.flink.runtime.shuffle.PartitionDescriptorBuilder; import org.apache.flink.runtime.shuffle.ShuffleDescriptor; import org.apache.flink.runtime.shuffle.ShuffleEnvironment; import org.apache.flink.runtime.taskexecutor.slot.TaskSlotTable; import org.apache.flink.runtime.taskmanager.Task; import org.apache.flink.runtime.testtasks.BlockingNoOpInvokable; import org.apache.flink.runtime.util.NettyShuffleDescriptorBuilder; import org.apache.flink.util.ExceptionUtils; import org.apache.flink.util.NetUtils; import org.apache.flink.util.Preconditions; import org.apache.flink.util.SerializedValue; import org.apache.flink.util.TestLogger; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; import org.mockito.Mockito; import java.io.IOException; import java.net.URL; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; import static 
org.apache.flink.runtime.util.NettyShuffleDescriptorBuilder.createRemoteWithIdAndLocation;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.Matchers.instanceOf;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;

/** Tests for submission logic of the {@link TaskExecutor}. */
public class TaskExecutorSubmissionTest extends TestLogger {

    private static final long TEST_TIMEOUT = 20000L;

    @Rule public final TestName testName = new TestName();

    // RPC timeout used for all gateway calls in these tests
    private static final Time timeout = Time.milliseconds(10000L);

    private JobID jobId = new JobID();

    /**
     * Tests that we can submit a task to the TaskManager given that we've allocated a slot there.
     */
    @Test(timeout = TEST_TIMEOUT)
    public void testTaskSubmission() throws Exception {
        final ExecutionAttemptID eid = new ExecutionAttemptID();

        final TaskDeploymentDescriptor tdd =
                createTestTaskDeploymentDescriptor("test task", eid, FutureCompletingInvokable.class);

        final CompletableFuture<Void> taskRunningFuture = new CompletableFuture<>();

        try (TaskSubmissionTestEnvironment env =
                new TaskSubmissionTestEnvironment.Builder(jobId)
                        .setSlotSize(1)
                        .addTaskManagerActionListener(eid, ExecutionState.RUNNING, taskRunningFuture)
                        .build()) {
            TaskExecutorGateway tmGateway = env.getTaskExecutorGateway();
            TaskSlotTable taskSlotTable = env.getTaskSlotTable();

            // allocate a slot first, then submit into it and wait until the task reaches RUNNING
            taskSlotTable.allocateSlot(0, jobId, tdd.getAllocationId(), Time.seconds(60));
            tmGateway.submitTask(tdd, env.getJobMasterId(), timeout).get();

            taskRunningFuture.get();
        }
    }

    /**
     * Tests that the TaskManager sends a proper exception back to the sender if the submit task
     * message fails.
     */
    @Test(timeout = TEST_TIMEOUT)
    public void testSubmitTaskFailure() throws Exception {
        final ExecutionAttemptID eid = new ExecutionAttemptID();

        final TaskDeploymentDescriptor tdd =
                createTestTaskDeploymentDescriptor(
                        "test task",
                        eid,
                        BlockingNoOpInvokable.class,
                        0); // this will make the submission fail because the number of key groups
                            // must be >= 1

        try (TaskSubmissionTestEnvironment env =
                new TaskSubmissionTestEnvironment.Builder(jobId).build()) {
            TaskExecutorGateway tmGateway = env.getTaskExecutorGateway();
            TaskSlotTable taskSlotTable = env.getTaskSlotTable();

            taskSlotTable.allocateSlot(0, jobId, tdd.getAllocationId(), Time.seconds(60));
            tmGateway.submitTask(tdd, env.getJobMasterId(), timeout).get();
        } catch (Exception e) {
            // submission is expected to fail during deserialization/validation of the descriptor
            assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));
        }
    }

    /** Tests that we can cancel the task of the TaskManager given that we've submitted it. */
    @Test(timeout = TEST_TIMEOUT)
    public void testTaskSubmissionAndCancelling() throws Exception {
        final ExecutionAttemptID eid1 = new ExecutionAttemptID();
        final ExecutionAttemptID eid2 = new ExecutionAttemptID();

        final TaskDeploymentDescriptor tdd1 =
                createTestTaskDeploymentDescriptor("test task", eid1, BlockingNoOpInvokable.class);
        final TaskDeploymentDescriptor tdd2 =
                createTestTaskDeploymentDescriptor("test task", eid2, BlockingNoOpInvokable.class);

        final CompletableFuture<Void> task1RunningFuture = new CompletableFuture<>();
        final CompletableFuture<Void> task2RunningFuture = new CompletableFuture<>();
        final CompletableFuture<Void> task1CanceledFuture = new CompletableFuture<>();

        try (TaskSubmissionTestEnvironment env =
                new TaskSubmissionTestEnvironment.Builder(jobId)
                        .setSlotSize(2)
                        .addTaskManagerActionListener(eid1, ExecutionState.RUNNING, task1RunningFuture)
                        .addTaskManagerActionListener(eid2, ExecutionState.RUNNING, task2RunningFuture)
                        .addTaskManagerActionListener(eid1, ExecutionState.CANCELED, task1CanceledFuture)
                        .build()) {
            TaskExecutorGateway tmGateway = env.getTaskExecutorGateway();
            TaskSlotTable<Task> taskSlotTable = env.getTaskSlotTable();

            // submit two blocking tasks into separate slots and wait until both are RUNNING
            taskSlotTable.allocateSlot(0, jobId, tdd1.getAllocationId(), Time.seconds(60));
            tmGateway.submitTask(tdd1, env.getJobMasterId(), timeout).get();
            task1RunningFuture.get();

            taskSlotTable.allocateSlot(1, jobId, tdd2.getAllocationId(), Time.seconds(60));
            tmGateway.submitTask(tdd2, env.getJobMasterId(), timeout).get();
            task2RunningFuture.get();

            assertSame(taskSlotTable.getTask(eid1).getExecutionState(), ExecutionState.RUNNING);
            assertSame(taskSlotTable.getTask(eid2).getExecutionState(), ExecutionState.RUNNING);

            // cancelling task 1 must not affect task 2
            tmGateway.cancelTask(eid1, timeout);
            task1CanceledFuture.get();

            assertSame(taskSlotTable.getTask(eid1).getExecutionState(), ExecutionState.CANCELED);
            assertSame(taskSlotTable.getTask(eid2).getExecutionState(), ExecutionState.RUNNING);
        }
    }

    /**
     * Tests that submitted tasks will fail when attempting to send/receive data if no
     * ResultPartitions/InputGates are set up.
     */
    @Test(timeout = TEST_TIMEOUT)
    public void testGateChannelEdgeMismatch() throws Exception {
        final ExecutionAttemptID eid1 = new ExecutionAttemptID();
        final ExecutionAttemptID eid2 = new ExecutionAttemptID();

        final TaskDeploymentDescriptor tdd1 =
                createTestTaskDeploymentDescriptor("Sender", eid1, TestingAbstractInvokables.Sender.class);
        final TaskDeploymentDescriptor tdd2 =
                createTestTaskDeploymentDescriptor("Receiver", eid2, TestingAbstractInvokables.Receiver.class);

        final CompletableFuture<Void> task1RunningFuture = new CompletableFuture<>();
        final CompletableFuture<Void> task2RunningFuture = new CompletableFuture<>();
        final CompletableFuture<Void> task1FailedFuture = new CompletableFuture<>();
        final CompletableFuture<Void> task2FailedFuture = new CompletableFuture<>();

        try (TaskSubmissionTestEnvironment env =
                new TaskSubmissionTestEnvironment.Builder(jobId)
                        .addTaskManagerActionListener(eid1, ExecutionState.RUNNING, task1RunningFuture)
                        .addTaskManagerActionListener(eid2, ExecutionState.RUNNING, task2RunningFuture)
                        .addTaskManagerActionListener(eid1, ExecutionState.FAILED, task1FailedFuture)
                        .addTaskManagerActionListener(eid2, ExecutionState.FAILED, task2FailedFuture)
                        .setSlotSize(2)
                        .build()) {
            TaskExecutorGateway tmGateway = env.getTaskExecutorGateway();
            TaskSlotTable<Task> taskSlotTable = env.getTaskSlotTable();

            taskSlotTable.allocateSlot(0, jobId, tdd1.getAllocationId(), Time.seconds(60));
            tmGateway.submitTask(tdd1, env.getJobMasterId(), timeout).get();
            task1RunningFuture.get();

            taskSlotTable.allocateSlot(1, jobId, tdd2.getAllocationId(), Time.seconds(60));
            tmGateway.submitTask(tdd2, env.getJobMasterId(), timeout).get();
            task2RunningFuture.get();

            // without partitions/gates wired up, both sides are expected to fail
            task1FailedFuture.get();
            task2FailedFuture.get();

            assertSame(taskSlotTable.getTask(eid1).getExecutionState(), ExecutionState.FAILED);
            assertSame(taskSlotTable.getTask(eid2).getExecutionState(), ExecutionState.FAILED);
        }
    }

    @Test(timeout = TEST_TIMEOUT)
    public void testRunJobWithForwardChannel() throws Exception {
        ResourceID producerLocation = ResourceID.generate();
        NettyShuffleDescriptor sdd =
                createRemoteWithIdAndLocation(new IntermediateResultPartitionID(), producerLocation);

        // sender and receiver share the same shuffle descriptor, forming a forward channel
        TaskDeploymentDescriptor tdd1 = createSender(sdd);
        TaskDeploymentDescriptor tdd2 = createReceiver(sdd);
        ExecutionAttemptID eid1 = tdd1.getExecutionAttemptId();
        ExecutionAttemptID eid2 = tdd2.getExecutionAttemptId();

        final CompletableFuture<Void> task1RunningFuture = new CompletableFuture<>();
        final CompletableFuture<Void> task2RunningFuture = new CompletableFuture<>();
        final CompletableFuture<Void> task1FinishedFuture = new CompletableFuture<>();
        final CompletableFuture<Void> task2FinishedFuture = new CompletableFuture<>();

        final JobMasterId jobMasterId = JobMasterId.generate();
        TestingJobMasterGateway testingJobMasterGateway =
                new TestingJobMasterGatewayBuilder()
                        .setFencingTokenSupplier(() -> jobMasterId)
                        .setNotifyPartitionDataAvailableFunction(
                                resultPartitionID -> CompletableFuture.completedFuture(Acknowledge.get()))
                        .build();

        try (TaskSubmissionTestEnvironment env =
                new TaskSubmissionTestEnvironment.Builder(jobId)
                        .setResourceID(producerLocation)
                        .setSlotSize(2)
                        .addTaskManagerActionListener(eid1, ExecutionState.RUNNING, task1RunningFuture)
                        .addTaskManagerActionListener(eid2, ExecutionState.RUNNING, task2RunningFuture)
                        .addTaskManagerActionListener(eid1, ExecutionState.FINISHED, task1FinishedFuture)
                        .addTaskManagerActionListener(eid2, ExecutionState.FINISHED, task2FinishedFuture)
                        .setJobMasterId(jobMasterId)
                        .setJobMasterGateway(testingJobMasterGateway)
                        .useRealNonMockShuffleEnvironment()
                        .build()) {
            TaskExecutorGateway tmGateway = env.getTaskExecutorGateway();
            TaskSlotTable<Task> taskSlotTable = env.getTaskSlotTable();

            taskSlotTable.allocateSlot(0, jobId, tdd1.getAllocationId(), Time.seconds(60));
            tmGateway.submitTask(tdd1, jobMasterId, timeout).get();
            task1RunningFuture.get();

            taskSlotTable.allocateSlot(1, jobId, tdd2.getAllocationId(), Time.seconds(60));
            tmGateway.submitTask(tdd2, jobMasterId, timeout).get();
            task2RunningFuture.get();

            task1FinishedFuture.get();
            task2FinishedFuture.get();

            assertSame(taskSlotTable.getTask(eid1).getExecutionState(), ExecutionState.FINISHED);
            assertSame(taskSlotTable.getTask(eid2).getExecutionState(), ExecutionState.FINISHED);
        }
    }

    /**
     * This tests creates two tasks. The sender sends data but fails to send the state update back
     * to the job manager.
the second one blocks to be canceled */ @Test(timeout = TEST_TIMEOUT) public void testCancellingDependentAndStateUpdateFails() throws Exception { ResourceID producerLocation = ResourceID.generate(); NettyShuffleDescriptor sdd = createRemoteWithIdAndLocation( new IntermediateResultPartitionID(), producerLocation); TaskDeploymentDescriptor tdd1 = createSender(sdd); TaskDeploymentDescriptor tdd2 = createReceiver(sdd); ExecutionAttemptID eid1 = tdd1.getExecutionAttemptId(); ExecutionAttemptID eid2 = tdd2.getExecutionAttemptId(); final CompletableFuture<Void> task1RunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> task2RunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> task1FailedFuture = new CompletableFuture<>(); final CompletableFuture<Void> task2CanceledFuture = new CompletableFuture<>(); final JobMasterId jobMasterId = JobMasterId.generate(); TestingJobMasterGateway testingJobMasterGateway = new TestingJobMasterGatewayBuilder() .setFencingTokenSupplier(() -> jobMasterId) .setUpdateTaskExecutionStateFunction( taskExecutionState -> { if (taskExecutionState != null && taskExecutionState.getID().equals(eid1)) { return FutureUtils.completedExceptionally( new ExecutionGraphException( "The execution attempt " + eid2 + " was not found.")); } else { return CompletableFuture.completedFuture(Acknowledge.get()); } }) .build(); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .setResourceID(producerLocation) .setSlotSize(2) .addTaskManagerActionListener( eid1, ExecutionState.RUNNING, task1RunningFuture) .addTaskManagerActionListener( eid2, ExecutionState.RUNNING, task2RunningFuture) .addTaskManagerActionListener( eid1, ExecutionState.FAILED, task1FailedFuture) .addTaskManagerActionListener( eid2, ExecutionState.CANCELED, task2CanceledFuture) .setJobMasterId(jobMasterId) .setJobMasterGateway(testingJobMasterGateway) .useRealNonMockShuffleEnvironment() .build()) { TaskExecutorGateway tmGateway = 
env.getTaskExecutorGateway(); TaskSlotTable<Task> taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, jobId, tdd1.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd1, jobMasterId, timeout).get(); task1RunningFuture.get(); taskSlotTable.allocateSlot(1, jobId, tdd2.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd2, jobMasterId, timeout).get(); task2RunningFuture.get(); task1FailedFuture.get(); assertSame(taskSlotTable.getTask(eid1).getExecutionState(), ExecutionState.FAILED); tmGateway.cancelTask(eid2, timeout); task2CanceledFuture.get(); assertSame(taskSlotTable.getTask(eid2).getExecutionState(), ExecutionState.CANCELED); } } /** * Tests that repeated remote {@link PartitionNotFoundException}s ultimately fail the receiver. */ @Test(timeout = TEST_TIMEOUT) public void testRemotePartitionNotFound() throws Exception { final int dataPort = NetUtils.getAvailablePort(); Configuration config = new Configuration(); config.setInteger(NettyShuffleEnvironmentOptions.DATA_PORT, dataPort); config.setInteger(NettyShuffleEnvironmentOptions.NETWORK_REQUEST_BACKOFF_INITIAL, 100); config.setInteger(NettyShuffleEnvironmentOptions.NETWORK_REQUEST_BACKOFF_MAX, 200); // Remote location (on the same TM though) for the partition NettyShuffleDescriptor sdd = NettyShuffleDescriptorBuilder.newBuilder().setDataPort(dataPort).buildRemote(); TaskDeploymentDescriptor tdd = createReceiver(sdd); ExecutionAttemptID eid = tdd.getExecutionAttemptId(); final CompletableFuture<Void> taskRunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> taskFailedFuture = new CompletableFuture<>(); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .setSlotSize(2) .addTaskManagerActionListener( eid, ExecutionState.RUNNING, taskRunningFuture) .addTaskManagerActionListener(eid, ExecutionState.FAILED, taskFailedFuture) .setConfiguration(config) .setLocalCommunication(false) .useRealNonMockShuffleEnvironment() .build()) 
{ TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); TaskSlotTable<Task> taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, jobId, tdd.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd, env.getJobMasterId(), timeout).get(); taskRunningFuture.get(); taskFailedFuture.get(); assertThat( taskSlotTable.getTask(eid).getFailureCause(), instanceOf(PartitionNotFoundException.class)); } } /** Tests that the TaskManager fails the task if the partition update fails. */ @Test public void testUpdateTaskInputPartitionsFailure() throws Exception { final ExecutionAttemptID eid = new ExecutionAttemptID(); final TaskDeploymentDescriptor tdd = createTestTaskDeploymentDescriptor("test task", eid, BlockingNoOpInvokable.class); final CompletableFuture<Void> taskRunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> taskFailedFuture = new CompletableFuture<>(); final ShuffleEnvironment<?, ?> shuffleEnvironment = mock(ShuffleEnvironment.class, Mockito.RETURNS_MOCKS); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .setShuffleEnvironment(shuffleEnvironment) .setSlotSize(1) .addTaskManagerActionListener( eid, ExecutionState.RUNNING, taskRunningFuture) .addTaskManagerActionListener(eid, ExecutionState.FAILED, taskFailedFuture) .build()) { TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); TaskSlotTable<Task> taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, jobId, tdd.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd, env.getJobMasterId(), timeout).get(); taskRunningFuture.get(); final ResourceID producerLocation = env.getTaskExecutor().getResourceID(); NettyShuffleDescriptor shuffleDescriptor = createRemoteWithIdAndLocation( new IntermediateResultPartitionID(), producerLocation); final PartitionInfo partitionUpdate = new PartitionInfo(new IntermediateDataSetID(), shuffleDescriptor); doThrow(new IOException()) .when(shuffleEnvironment) 
.updatePartitionInfo(eid, partitionUpdate); final CompletableFuture<Acknowledge> updateFuture = tmGateway.updatePartitions( eid, Collections.singletonList(partitionUpdate), timeout); updateFuture.get(); taskFailedFuture.get(); Task task = taskSlotTable.getTask(tdd.getExecutionAttemptId()); assertThat(task.getExecutionState(), is(ExecutionState.FAILED)); assertThat(task.getFailureCause(), instanceOf(IOException.class)); } } /** * Tests that repeated local {@link PartitionNotFoundException}s ultimately fail the receiver. */ @Test(timeout = TEST_TIMEOUT) public void testLocalPartitionNotFound() throws Exception { ResourceID producerLocation = ResourceID.generate(); NettyShuffleDescriptor shuffleDescriptor = createRemoteWithIdAndLocation( new IntermediateResultPartitionID(), producerLocation); TaskDeploymentDescriptor tdd = createReceiver(shuffleDescriptor); ExecutionAttemptID eid = tdd.getExecutionAttemptId(); Configuration config = new Configuration(); config.setInteger(NettyShuffleEnvironmentOptions.NETWORK_REQUEST_BACKOFF_INITIAL, 100); config.setInteger(NettyShuffleEnvironmentOptions.NETWORK_REQUEST_BACKOFF_MAX, 200); final CompletableFuture<Void> taskRunningFuture = new CompletableFuture<>(); final CompletableFuture<Void> taskFailedFuture = new CompletableFuture<>(); try (TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .setResourceID(producerLocation) .setSlotSize(1) .addTaskManagerActionListener( eid, ExecutionState.RUNNING, taskRunningFuture) .addTaskManagerActionListener(eid, ExecutionState.FAILED, taskFailedFuture) .setConfiguration(config) .useRealNonMockShuffleEnvironment() .build()) { TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); TaskSlotTable<Task> taskSlotTable = env.getTaskSlotTable(); taskSlotTable.allocateSlot(0, jobId, tdd.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd, env.getJobMasterId(), timeout).get(); taskRunningFuture.get(); taskFailedFuture.get(); 
assertSame(taskSlotTable.getTask(eid).getExecutionState(), ExecutionState.FAILED); assertThat( taskSlotTable.getTask(eid).getFailureCause(), instanceOf(PartitionNotFoundException.class)); } } /** * Test that a failing notifyPartitionDataAvailable call leads to the failing of the respective * task. * * <p>IMPORTANT: We have to make sure that the invokable's cancel method is called, because only * then the future is completed. We do this by not eagerly deploying consumer tasks and * requiring the invokable to fill one memory segment. The completed memory segment will trigger * the scheduling of the downstream operator since it is in pipeline mode. After we've filled * the memory segment, we'll block the invokable and wait for the task failure due to the failed * notifyPartitionDataAvailable call. */ @Test(timeout = TEST_TIMEOUT) public void testFailingNotifyPartitionDataAvailable() throws Exception { final Configuration configuration = new Configuration(); // set the memory segment to the smallest size possible, because we have to fill one // memory buffer to trigger notifyPartitionDataAvailable to the downstream // operators configuration.set(TaskManagerOptions.MEMORY_SEGMENT_SIZE, MemorySize.parse("4096")); NettyShuffleDescriptor sdd = createRemoteWithIdAndLocation( new IntermediateResultPartitionID(), ResourceID.generate()); TaskDeploymentDescriptor tdd = createSender(sdd, TestingAbstractInvokables.TestInvokableRecordCancel.class); ExecutionAttemptID eid = tdd.getExecutionAttemptId(); final CompletableFuture<Void> taskRunningFuture = new CompletableFuture<>(); final Exception exception = new Exception("Failed notifyPartitionDataAvailable"); final JobMasterId jobMasterId = JobMasterId.generate(); TestingJobMasterGateway testingJobMasterGateway = new TestingJobMasterGatewayBuilder() .setFencingTokenSupplier(() -> jobMasterId) .setNotifyPartitionDataAvailableFunction( resultPartitionID -> FutureUtils.completedExceptionally(exception)) .build(); try 
(TaskSubmissionTestEnvironment env = new TaskSubmissionTestEnvironment.Builder(jobId) .setSlotSize(1) .setConfiguration(configuration) .addTaskManagerActionListener( eid, ExecutionState.RUNNING, taskRunningFuture) .setJobMasterId(jobMasterId) .setJobMasterGateway(testingJobMasterGateway) .useRealNonMockShuffleEnvironment() .build()) { TaskExecutorGateway tmGateway = env.getTaskExecutorGateway(); TaskSlotTable<Task> taskSlotTable = env.getTaskSlotTable(); TestingAbstractInvokables.TestInvokableRecordCancel.resetGotCanceledFuture(); taskSlotTable.allocateSlot(0, jobId, tdd.getAllocationId(), Time.seconds(60)); tmGateway.submitTask(tdd, jobMasterId, timeout).get(); taskRunningFuture.get(); CompletableFuture<Boolean> cancelFuture = TestingAbstractInvokables.TestInvokableRecordCancel.gotCanceled(); assertTrue(cancelFuture.get()); assertTrue( ExceptionUtils.findThrowableWithMessage( taskSlotTable.getTask(eid).getFailureCause(), exception.getMessage()) .isPresent()); } } private TaskDeploymentDescriptor createSender(NettyShuffleDescriptor shuffleDescriptor) throws IOException { return createSender(shuffleDescriptor, TestingAbstractInvokables.Sender.class); } private TaskDeploymentDescriptor createSender( NettyShuffleDescriptor shuffleDescriptor, Class<? 
extends AbstractInvokable> abstractInvokable) throws IOException { PartitionDescriptor partitionDescriptor = PartitionDescriptorBuilder.newBuilder() .setPartitionId(shuffleDescriptor.getResultPartitionID().getPartitionId()) .build(); ResultPartitionDeploymentDescriptor resultPartitionDeploymentDescriptor = new ResultPartitionDeploymentDescriptor( partitionDescriptor, shuffleDescriptor, 1, true); return createTestTaskDeploymentDescriptor( "Sender", shuffleDescriptor.getResultPartitionID().getProducerId(), abstractInvokable, 1, Collections.singletonList(resultPartitionDeploymentDescriptor), Collections.emptyList()); } private TaskDeploymentDescriptor createReceiver(NettyShuffleDescriptor shuffleDescriptor) throws IOException { InputGateDeploymentDescriptor inputGateDeploymentDescriptor = new InputGateDeploymentDescriptor( new IntermediateDataSetID(), ResultPartitionType.PIPELINED, 0, new ShuffleDescriptor[] {shuffleDescriptor}); return createTestTaskDeploymentDescriptor( "Receiver", new ExecutionAttemptID(), TestingAbstractInvokables.Receiver.class, 1, Collections.emptyList(), Collections.singletonList(inputGateDeploymentDescriptor)); } private TaskDeploymentDescriptor createTestTaskDeploymentDescriptor( String taskName, ExecutionAttemptID eid, Class<? extends AbstractInvokable> abstractInvokable) throws IOException { return createTestTaskDeploymentDescriptor(taskName, eid, abstractInvokable, 1); } private TaskDeploymentDescriptor createTestTaskDeploymentDescriptor( String taskName, ExecutionAttemptID eid, Class<? extends AbstractInvokable> abstractInvokable, int maxNumberOfSubtasks) throws IOException { return createTestTaskDeploymentDescriptor( taskName, eid, abstractInvokable, maxNumberOfSubtasks, Collections.emptyList(), Collections.emptyList()); } private TaskDeploymentDescriptor createTestTaskDeploymentDescriptor( String taskName, ExecutionAttemptID eid, Class<? 
extends AbstractInvokable> abstractInvokable, int maxNumberOfSubtasks, List<ResultPartitionDeploymentDescriptor> producedPartitions, List<InputGateDeploymentDescriptor> inputGates) throws IOException { Preconditions.checkNotNull(producedPartitions); Preconditions.checkNotNull(inputGates); return createTaskDeploymentDescriptor( jobId, testName.getMethodName(), eid, new SerializedValue<>(new ExecutionConfig()), taskName, maxNumberOfSubtasks, 0, 1, 0, new Configuration(), new Configuration(), abstractInvokable.getName(), producedPartitions, inputGates, Collections.emptyList(), Collections.emptyList()); } static TaskDeploymentDescriptor createTaskDeploymentDescriptor( JobID jobId, String jobName, ExecutionAttemptID executionAttemptId, SerializedValue<ExecutionConfig> serializedExecutionConfig, String taskName, int maxNumberOfSubtasks, int subtaskIndex, int numberOfSubtasks, int attemptNumber, Configuration jobConfiguration, Configuration taskConfiguration, String invokableClassName, List<ResultPartitionDeploymentDescriptor> producedPartitions, List<InputGateDeploymentDescriptor> inputGates, Collection<PermanentBlobKey> requiredJarFiles, Collection<URL> requiredClasspaths) throws IOException { JobInformation jobInformation = new JobInformation( jobId, jobName, serializedExecutionConfig, jobConfiguration, requiredJarFiles, requiredClasspaths); TaskInformation taskInformation = new TaskInformation( new JobVertexID(), taskName, numberOfSubtasks, maxNumberOfSubtasks, invokableClassName, taskConfiguration); SerializedValue<JobInformation> serializedJobInformation = new SerializedValue<>(jobInformation); SerializedValue<TaskInformation> serializedJobVertexInformation = new SerializedValue<>(taskInformation); return new TaskDeploymentDescriptor( jobId, new TaskDeploymentDescriptor.NonOffloaded<>(serializedJobInformation), new TaskDeploymentDescriptor.NonOffloaded<>(serializedJobVertexInformation), executionAttemptId, new AllocationID(), subtaskIndex, attemptNumber, null, 
producedPartitions, inputGates); } /** Test invokable which completes the given future when executed. */ public static class FutureCompletingInvokable extends AbstractInvokable { static final CompletableFuture<Boolean> COMPLETABLE_FUTURE = new CompletableFuture<>(); public FutureCompletingInvokable(Environment environment) { super(environment); } @Override public void invoke() throws Exception { COMPLETABLE_FUTURE.complete(true); } } }
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.engine.test.api.runtime.migration; import static org.camunda.bpm.engine.test.api.runtime.migration.ModifiableBpmnModelInstance.modify; import static org.camunda.bpm.engine.test.util.ActivityInstanceAssert.describeActivityInstanceTree; import static org.camunda.bpm.engine.test.util.ExecutionAssert.describeExecutionTree; import org.camunda.bpm.engine.migration.MigrationPlan; import org.camunda.bpm.engine.repository.ProcessDefinition; import org.camunda.bpm.engine.runtime.ProcessInstance; import org.camunda.bpm.engine.test.ProcessEngineRule; import org.camunda.bpm.engine.test.api.runtime.migration.models.ProcessModels; import org.camunda.bpm.engine.test.api.runtime.migration.models.SignalCatchModels; import org.camunda.bpm.engine.test.util.ProvidedProcessEngineRule; import org.junit.Rule; import org.junit.Test; import org.junit.rules.RuleChain; /** * @author Thorben Lindhauer * */ public class MigrationSignalCatchEventTest { protected ProcessEngineRule rule = new ProvidedProcessEngineRule(); protected MigrationTestRule testHelper = new MigrationTestRule(rule); @Rule public RuleChain ruleChain = RuleChain.outerRule(rule).around(testHelper); @Test public void testMigrateEventSubscription() { // given ProcessDefinition sourceProcessDefinition = testHelper.deployAndGetDefinition(SignalCatchModels.ONE_SIGNAL_CATCH_PROCESS); ProcessDefinition targetProcessDefinition = 
testHelper.deployAndGetDefinition(SignalCatchModels.ONE_SIGNAL_CATCH_PROCESS); MigrationPlan migrationPlan = rule.getRuntimeService() .createMigrationPlan(sourceProcessDefinition.getId(), targetProcessDefinition.getId()) .mapActivities("signalCatch", "signalCatch") .build(); // when ProcessInstance processInstance = testHelper.createProcessInstanceAndMigrate(migrationPlan); // then testHelper.assertEventSubscriptionMigrated("signalCatch", "signalCatch", SignalCatchModels.SIGNAL_NAME); testHelper.assertExecutionTreeAfterMigration() .hasProcessDefinitionId(targetProcessDefinition.getId()) .matches( describeExecutionTree(null).scope().id(testHelper.snapshotBeforeMigration.getProcessInstanceId()) .child("signalCatch").scope().id(testHelper.getSingleExecutionIdForActivityBeforeMigration("signalCatch")) .done()); testHelper.assertActivityTreeAfterMigration().hasStructure( describeActivityInstanceTree(targetProcessDefinition.getId()) .activity("signalCatch", testHelper.getSingleActivityInstanceBeforeMigration("signalCatch").getId()) .done()); // and it is possible to trigger the event rule.getRuntimeService().signalEventReceived(SignalCatchModels.SIGNAL_NAME); testHelper.completeTask("userTask"); testHelper.assertProcessEnded(processInstance.getId()); } @Test public void testMigrateEventSubscriptionChangeActivityId() { // given ProcessDefinition sourceProcessDefinition = testHelper.deployAndGetDefinition(SignalCatchModels.ONE_SIGNAL_CATCH_PROCESS); ProcessDefinition targetProcessDefinition = testHelper.deployAndGetDefinition(modify(SignalCatchModels.ONE_SIGNAL_CATCH_PROCESS) .changeElementId("signalCatch", "newSignalCatch")); MigrationPlan migrationPlan = rule.getRuntimeService() .createMigrationPlan(sourceProcessDefinition.getId(), targetProcessDefinition.getId()) .mapActivities("signalCatch", "newSignalCatch") .build(); // when ProcessInstance processInstance = testHelper.createProcessInstanceAndMigrate(migrationPlan); // then 
testHelper.assertEventSubscriptionMigrated("signalCatch", "newSignalCatch", SignalCatchModels.SIGNAL_NAME); // and it is possible to trigger the event rule.getRuntimeService().signalEventReceived(SignalCatchModels.SIGNAL_NAME); testHelper.completeTask("userTask"); testHelper.assertProcessEnded(processInstance.getId()); } @Test public void testMigrateEventSubscriptionPreserveSignalName() { // given ProcessDefinition sourceProcessDefinition = testHelper.deployAndGetDefinition(SignalCatchModels.ONE_SIGNAL_CATCH_PROCESS); ProcessDefinition targetProcessDefinition = testHelper.deployAndGetDefinition(ProcessModels.newModel() .startEvent() .intermediateCatchEvent("signalCatch") .signal("new" + SignalCatchModels.SIGNAL_NAME) .userTask("userTask") .endEvent() .done()); MigrationPlan migrationPlan = rule.getRuntimeService() .createMigrationPlan(sourceProcessDefinition.getId(), targetProcessDefinition.getId()) .mapActivities("signalCatch", "signalCatch") .build(); // when ProcessInstance processInstance = testHelper.createProcessInstanceAndMigrate(migrationPlan); // then the signal name of the event subscription has not changed testHelper.assertEventSubscriptionMigrated("signalCatch", "signalCatch", SignalCatchModels.SIGNAL_NAME); // and it is possible to trigger the event rule.getRuntimeService().signalEventReceived(SignalCatchModels.SIGNAL_NAME); testHelper.completeTask("userTask"); testHelper.assertProcessEnded(processInstance.getId()); } @Test public void testMigrateEventSubscriptionUpdateSignalName() { // given ProcessDefinition sourceProcessDefinition = testHelper.deployAndGetDefinition(SignalCatchModels.ONE_SIGNAL_CATCH_PROCESS); ProcessDefinition targetProcessDefinition = testHelper.deployAndGetDefinition(ProcessModels.newModel() .startEvent() .intermediateCatchEvent("signalCatch") .signal("new" + SignalCatchModels.SIGNAL_NAME) .userTask("userTask") .endEvent() .done()); MigrationPlan migrationPlan = rule.getRuntimeService() 
.createMigrationPlan(sourceProcessDefinition.getId(), targetProcessDefinition.getId()) .mapActivities("signalCatch", "signalCatch") .updateEventTrigger() .build(); // when ProcessInstance processInstance = testHelper.createProcessInstanceAndMigrate(migrationPlan); // then the message event subscription's event name has not changed testHelper.assertEventSubscriptionMigrated( "signalCatch", SignalCatchModels.SIGNAL_NAME, "signalCatch", "new" + SignalCatchModels.SIGNAL_NAME); // and it is possible to trigger the event rule.getRuntimeService().signalEventReceived("new" + SignalCatchModels.SIGNAL_NAME); testHelper.completeTask("userTask"); testHelper.assertProcessEnded(processInstance.getId()); } @Test public void testMigrateJobAddParentScope() { // given ProcessDefinition sourceProcessDefinition = testHelper.deployAndGetDefinition(SignalCatchModels.ONE_SIGNAL_CATCH_PROCESS); ProcessDefinition targetProcessDefinition = testHelper.deployAndGetDefinition(SignalCatchModels.SUBPROCESS_SIGNAL_CATCH_PROCESS); MigrationPlan migrationPlan = rule.getRuntimeService() .createMigrationPlan(sourceProcessDefinition.getId(), targetProcessDefinition.getId()) .mapActivities("signalCatch", "signalCatch") .build(); // when ProcessInstance processInstance = testHelper.createProcessInstanceAndMigrate(migrationPlan); // then testHelper.assertEventSubscriptionMigrated("signalCatch", "signalCatch", SignalCatchModels.SIGNAL_NAME); testHelper.assertExecutionTreeAfterMigration() .hasProcessDefinitionId(targetProcessDefinition.getId()) .matches( describeExecutionTree(null).scope().id(testHelper.snapshotBeforeMigration.getProcessInstanceId()) .child(null).scope() .child("signalCatch").scope().id(testHelper.getSingleExecutionIdForActivityBeforeMigration("signalCatch")) .done()); testHelper.assertActivityTreeAfterMigration().hasStructure( describeActivityInstanceTree(targetProcessDefinition.getId()) .beginScope("subProcess") .activity("signalCatch", 
testHelper.getSingleActivityInstanceBeforeMigration("signalCatch").getId()) .done()); // and it is possible to trigger the event rule.getRuntimeService().signalEventReceived(SignalCatchModels.SIGNAL_NAME); testHelper.completeTask("userTask"); testHelper.assertProcessEnded(processInstance.getId()); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.client; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.TreeMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.Waiter; import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor; import org.apache.hadoop.hbase.coprocessor.ObserverContext; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.RegionObserver; import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException; import 
org.apache.hadoop.hbase.regionserver.StorefileRefresherChore; import org.apache.hadoop.hbase.regionserver.TestHRegionServerBulkLoad; import org.apache.hadoop.hbase.replication.ReplicationPeerConfig; import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.tool.BulkLoadHFiles; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @Category({LargeTests.class, ClientTests.class}) public class TestReplicaWithCluster { @ClassRule public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestReplicaWithCluster.class); private static final Logger LOG = LoggerFactory.getLogger(TestReplicaWithCluster.class); private static final int NB_SERVERS = 3; private static final byte[] row = Bytes.toBytes(TestReplicaWithCluster.class.getName()); private static final HBaseTestingUtil HTU = new HBaseTestingUtil(); // second minicluster used in testing of replication private static HBaseTestingUtil HTU2; private static final byte[] f = HConstants.CATALOG_FAMILY; private final static int REFRESH_PERIOD = 1000; private final static int META_SCAN_TIMEOUT_IN_MILLISEC = 200; /** * This copro is used to synchronize the tests. 
*/
public static class SlowMeCopro implements RegionCoprocessor, RegionObserver {

  // Test-controlled knobs: an optional sleep applied to primary-replica Gets, and a latch
  // that blocks primary-replica Gets until counted down (or a 2-minute timeout elapses).
  static final AtomicLong sleepTime = new AtomicLong(0);
  static final AtomicReference<CountDownLatch> cdl =
      new AtomicReference<>(new CountDownLatch(0));

  public SlowMeCopro() {
  }

  @Override
  public Optional<RegionObserver> getRegionObserver() {
    return Optional.of(this);
  }

  /**
   * Delays or blocks Gets served by the primary replica (replica id 0) only; secondary
   * replicas are untouched, so TIMELINE-consistency reads fall back to them and come
   * back marked stale.
   */
  @Override
  public void preGetOp(final ObserverContext<RegionCoprocessorEnvironment> e, final Get get,
      final List<Cell> results) throws IOException {
    if (e.getEnvironment().getRegion().getRegionInfo().getReplicaId() == 0) {
      CountDownLatch latch = cdl.get();
      try {
        if (sleepTime.get() > 0) {
          LOG.info("Sleeping for " + sleepTime.get() + " ms");
          Thread.sleep(sleepTime.get());
        } else if (latch.getCount() > 0) {
          LOG.info("Waiting for the counterCountDownLatch");
          latch.await(2, TimeUnit.MINUTES); // To help the tests to finish.
          if (latch.getCount() > 0) {
            throw new RuntimeException("Can't wait more");
          }
        }
      } catch (InterruptedException e1) {
        // Best-effort test hook: log and continue rather than fail the server-side call.
        LOG.error(e1.toString(), e1);
      }
    } else {
      LOG.info("We're not the primary replicas.");
    }
  }
}

/**
 * This copro is used to simulate region server down exception for Get and Scan
 */
@CoreCoprocessor
public static class RegionServerStoppedCopro implements RegionCoprocessor, RegionObserver {

  public RegionServerStoppedCopro() {
  }

  @Override
  public Optional<RegionObserver> getRegionObserver() {
    return Optional.of(this);
  }

  // NOTE(review): "Exceptoin" in the log messages below is a pre-existing typo in the
  // runtime strings; left untouched here.
  @Override
  public void preGetOp(final ObserverContext<RegionCoprocessorEnvironment> e, final Get get,
      final List<Cell> results) throws IOException {
    int replicaId = e.getEnvironment().getRegion().getRegionInfo().getReplicaId();
    // Fail for the primary replica and replica 1
    if (e.getEnvironment().getRegion().getRegionInfo().getReplicaId() <= 1) {
      LOG.info("Throw Region Server Stopped Exceptoin for replica id " + replicaId);
      throw new RegionServerStoppedException(
          "Server " + e.getEnvironment().getServerName() + " not running");
    } else {
      LOG.info("We're replica region " + replicaId);
    }
  }

  @Override
  public void preScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> e,
      final Scan scan) throws IOException {
    int replicaId = e.getEnvironment().getRegion().getRegionInfo().getReplicaId();
    // Fail for the primary replica and replica 1
    if (e.getEnvironment().getRegion().getRegionInfo().getReplicaId() <= 1) {
      LOG.info("Throw Region Server Stopped Exceptoin for replica id " + replicaId);
      throw new RegionServerStoppedException(
          "Server " + e.getEnvironment().getServerName() + " not running");
    } else {
      LOG.info("We're replica region " + replicaId);
    }
  }
}

/**
 * This copro is used to slow down the primary meta region scan a bit
 */
public static class RegionServerHostingPrimayMetaRegionSlowOrStopCopro
    implements RegionCoprocessor, RegionObserver {

  // Flipped by tests: slow the primary meta scan past the client timeout, or fail reads.
  static boolean slowDownPrimaryMetaScan = false;
  static boolean throwException = false;

  @Override
  public Optional<RegionObserver> getRegionObserver() {
    return Optional.of(this);
  }

  @Override
  public void preGetOp(final ObserverContext<RegionCoprocessorEnvironment> e, final Get get,
      final List<Cell> results) throws IOException {
    int replicaId = e.getEnvironment().getRegion().getRegionInfo().getReplicaId();
    // Fail for the primary replica, but not for meta
    if (throwException) {
      if (!e.getEnvironment().getRegion().getRegionInfo().isMetaRegion() && (replicaId == 0)) {
        LOG.info("Get, throw Region Server Stopped Exceptoin for region "
            + e.getEnvironment().getRegion().getRegionInfo());
        throw new RegionServerStoppedException(
            "Server " + e.getEnvironment().getServerName() + " not running");
      }
    } else {
      LOG.info("Get, We're replica region " + replicaId);
    }
  }

  @Override
  public void preScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> e,
      final Scan scan) throws IOException {
    int replicaId = e.getEnvironment().getRegion().getRegionInfo().getReplicaId();
    // Slow down with the primary meta region scan
    if (e.getEnvironment().getRegion().getRegionInfo().isMetaRegion() && (replicaId == 0)) {
      if (slowDownPrimaryMetaScan) {
        LOG.info("Scan with primary meta region, slow down a bit");
        try {
          // Sleep just under the client-side meta scan timeout so the scan is slow but alive.
          Thread.sleep(META_SCAN_TIMEOUT_IN_MILLISEC - 50);
        } catch (InterruptedException ie) {
          // Ignore: interrupted best-effort sleep in a test hook.
        }
      }

      // Fail for the primary replica
      if (throwException) {
        LOG.info("Scan, throw Region Server Stopped Exceptoin for replica "
            + e.getEnvironment().getRegion().getRegionInfo());
        throw new RegionServerStoppedException(
            "Server " + e.getEnvironment().getServerName() + " not running");
      } else {
        LOG.info("Scan, We're replica region " + replicaId);
      }
    } else {
      LOG.info("Scan, We're replica region " + replicaId);
    }
  }
}

@BeforeClass
public static void beforeClass() throws Exception {
  // enable store file refreshing
  HTU.getConfiguration().setInt(StorefileRefresherChore.REGIONSERVER_STOREFILE_REFRESH_PERIOD,
      REFRESH_PERIOD);
  HTU.getConfiguration().setFloat("hbase.regionserver.logroll.multiplier", 0.0001f);
  HTU.getConfiguration().setInt("replication.source.size.capacity", 10240);
  HTU.getConfiguration().setLong("replication.source.sleepforretries", 100);
  HTU.getConfiguration().setInt("hbase.regionserver.maxlogs", 2);
  HTU.getConfiguration().setLong("hbase.master.logcleaner.ttl", 10);
  HTU.getConfiguration().setInt("zookeeper.recovery.retry", 1);
  HTU.getConfiguration().setInt("zookeeper.recovery.retry.intervalmill", 10);

  // Wait for primary call longer so make sure that it will get exception from the primary call
  HTU.getConfiguration().setInt("hbase.client.primaryCallTimeout.get", 1000000);
  HTU.getConfiguration().setInt("hbase.client.primaryCallTimeout.scan", 1000000);

  // Make sure master does not host system tables.
  HTU.getConfiguration().set("hbase.balancer.tablesOnMaster", "none");

  // Set system coprocessor so it can be applied to meta regions
  HTU.getConfiguration().set("hbase.coprocessor.region.classes",
      RegionServerHostingPrimayMetaRegionSlowOrStopCopro.class.getName());

  // The * 1000 presumably converts millis to the config's microsecond unit — TODO confirm
  // against the HBASE_CLIENT_META_REPLICA_SCAN_TIMEOUT documentation.
  HTU.getConfiguration().setInt(HConstants.HBASE_CLIENT_META_REPLICA_SCAN_TIMEOUT,
      META_SCAN_TIMEOUT_IN_MILLISEC * 1000);

  HTU.startMiniCluster(NB_SERVERS);
  // Enable meta replica at server side
  HBaseTestingUtil.setReplicas(HTU.getAdmin(), TableName.META_TABLE_NAME, 2);

  HTU.getHBaseCluster().startMaster();
}

@AfterClass
public static void afterClass() throws Exception {
  // HTU2 is only created by testReplicaAndReplication(); shut it down first when present.
  if (HTU2 != null) HTU2.shutdownMiniCluster();
  HTU.shutdownMiniCluster();
}

/**
 * Creates a replicated table, verifies a normal Get is not stale, then blocks the primary
 * replica via SlowMeCopro and verifies a TIMELINE Get returns a stale result.
 */
@Test
public void testCreateDeleteTable() throws IOException {
  // Create table then get the single region for our new table.
  TableDescriptorBuilder builder =
      HTU.createModifyableTableDescriptor(TableName.valueOf("testCreateDeleteTable"),
          ColumnFamilyDescriptorBuilder.DEFAULT_MIN_VERSIONS, 3, HConstants.FOREVER,
          ColumnFamilyDescriptorBuilder.DEFAULT_KEEP_DELETED);
  builder.setRegionReplication(NB_SERVERS);
  builder.setCoprocessor(SlowMeCopro.class.getName());
  TableDescriptor hdt = builder.build();
  Table table = HTU.createTable(hdt, new byte[][] { f }, null);

  Put p = new Put(row);
  p.addColumn(f, row, row);
  table.put(p);

  Get g = new Get(row);
  Result r = table.get(g);
  Assert.assertFalse(r.isStale());

  try {
    // But if we ask for stale we will get it
    SlowMeCopro.cdl.set(new CountDownLatch(1));
    g = new Get(row);
    g.setConsistency(Consistency.TIMELINE);
    r = table.get(g);
    Assert.assertTrue(r.isStale());
    SlowMeCopro.cdl.get().countDown();
  } finally {
    // countDown is idempotent, so the double countDown on success is harmless.
    SlowMeCopro.cdl.get().countDown();
    SlowMeCopro.sleepTime.set(0);
  }

  // NOTE(review): 'table' is never closed in this test — minor resource leak.
  HTU.getAdmin().disableTable(hdt.getTableName());
  HTU.deleteTable(hdt.getTableName());
}

/**
 * Verifies that adding a column family to a replicated table works and that both fresh
 * (primary) and stale (secondary) reads still succeed afterwards.
 */
@Test
public void testChangeTable() throws Exception {
  TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf("testChangeTable"))
      .setRegionReplication(NB_SERVERS)
      .setCoprocessor(SlowMeCopro.class.getName())
      .setColumnFamily(ColumnFamilyDescriptorBuilder.of(f))
      .build();
  HTU.getAdmin().createTable(td);
  Table table = HTU.getConnection().getTable(td.getTableName());

  // basic test: it should work.
  Put p = new Put(row);
  p.addColumn(f, row, row);
  table.put(p);

  Get g = new Get(row);
  Result r = table.get(g);
  Assert.assertFalse(r.isStale());

  // Add a CF, it should work.
  TableDescriptor bHdt = HTU.getAdmin().getDescriptor(td.getTableName());
  td = TableDescriptorBuilder.newBuilder(td)
      .setColumnFamily(ColumnFamilyDescriptorBuilder.of(row))
      .build();
  HTU.getAdmin().disableTable(td.getTableName());
  HTU.getAdmin().modifyTable(td);
  HTU.getAdmin().enableTable(td.getTableName());
  TableDescriptor nHdt = HTU.getAdmin().getDescriptor(td.getTableName());
  Assert.assertEquals("fams=" + Arrays.toString(nHdt.getColumnFamilies()),
      bHdt.getColumnFamilyCount() + 1, nHdt.getColumnFamilyCount());

  // Write/read through the newly added family (named after 'row').
  p = new Put(row);
  p.addColumn(row, row, row);
  table.put(p);

  g = new Get(row);
  r = table.get(g);
  Assert.assertFalse(r.isStale());

  try {
    // Block the primary so a TIMELINE read is served (stale) by a secondary.
    SlowMeCopro.cdl.set(new CountDownLatch(1));
    g = new Get(row);
    g.setConsistency(Consistency.TIMELINE);
    r = table.get(g);
    Assert.assertTrue(r.isStale());
  } finally {
    SlowMeCopro.cdl.get().countDown();
    SlowMeCopro.sleepTime.set(0);
  }

  Admin admin = HTU.getAdmin();
  nHdt = admin.getDescriptor(td.getTableName());
  Assert.assertEquals("fams=" + Arrays.toString(nHdt.getColumnFamilies()),
      bHdt.getColumnFamilyCount() + 1, nHdt.getColumnFamilyCount());

  admin.disableTable(td.getTableName());
  admin.deleteTable(td.getTableName());
  admin.close();
}

/**
 * Exercises region replicas combined with cross-cluster replication: stands up a second
 * mini-cluster sharing the same ZK, links it as a replication peer, then checks stale
 * (secondary-replica) reads on both sides.
 */
@SuppressWarnings("deprecation")
@Test
public void testReplicaAndReplication() throws Exception {
  TableDescriptorBuilder builder =
      HTU.createModifyableTableDescriptor("testReplicaAndReplication");
  builder.setRegionReplication(NB_SERVERS);
  // GLOBAL scope is what makes the family eligible for cross-cluster replication.
  builder.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(row)
      .setScope(HConstants.REPLICATION_SCOPE_GLOBAL).build());
  builder.setCoprocessor(SlowMeCopro.class.getName());
  TableDescriptor tableDescriptor = builder.build();
  HTU.getAdmin().createTable(tableDescriptor, HBaseTestingUtil.KEYS_FOR_HBA_CREATE_TABLE);

  // Second cluster reuses the same ZK ensemble under a different znode parent.
  Configuration conf2 = HBaseConfiguration.create(HTU.getConfiguration());
  conf2.set(HConstants.HBASE_CLIENT_INSTANCE_ID, String.valueOf(-1));
  conf2.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/2");
  MiniZooKeeperCluster miniZK = HTU.getZkCluster();

  HTU2 = new HBaseTestingUtil(conf2);
  HTU2.setZkCluster(miniZK);
  HTU2.startMiniCluster(NB_SERVERS);
  LOG.info("Setup second Zk");
  HTU2.getAdmin().createTable(tableDescriptor, HBaseTestingUtil.KEYS_FOR_HBA_CREATE_TABLE);

  try (Connection connection = ConnectionFactory.createConnection(HTU.getConfiguration());
      Admin admin = connection.getAdmin()) {
    ReplicationPeerConfig rpc =
        ReplicationPeerConfig.newBuilder().setClusterKey(HTU2.getClusterKey()).build();
    admin.addReplicationPeer("2", rpc);
  }

  Put p = new Put(row);
  p.addColumn(row, row, row);
  final Table table = HTU.getConnection().getTable(tableDescriptor.getTableName());
  table.put(p);

  HTU.getAdmin().flush(table.getName());
  LOG.info("Put & flush done on the first cluster. Now doing a get on the same cluster.");

  Waiter.waitFor(HTU.getConfiguration(), 1000, new Waiter.Predicate<Exception>() {
    @Override
    public boolean evaluate() throws Exception {
      try {
        SlowMeCopro.cdl.set(new CountDownLatch(1));
        Get g = new Get(row);
        g.setConsistency(Consistency.TIMELINE);
        Result r = table.get(g);
        Assert.assertTrue(r.isStale());
        return !r.isEmpty();
      } finally {
        SlowMeCopro.cdl.get().countDown();
        SlowMeCopro.sleepTime.set(0);
      }
    }
  });
  table.close();
  LOG.info("stale get on the first cluster done. Now for the second.");

  // NOTE(review): despite the log message, this still reads through HTU (the first
  // cluster), not HTU2 — verify whether HTU2.getConnection() was intended here.
  final Table table2 = HTU.getConnection().getTable(tableDescriptor.getTableName());
  Waiter.waitFor(HTU.getConfiguration(), 1000, new Waiter.Predicate<Exception>() {
    @Override
    public boolean evaluate() throws Exception {
      try {
        SlowMeCopro.cdl.set(new CountDownLatch(1));
        Get g = new Get(row);
        g.setConsistency(Consistency.TIMELINE);
        Result r = table2.get(g);
        Assert.assertTrue(r.isStale());
        return !r.isEmpty();
      } finally {
        SlowMeCopro.cdl.get().countDown();
        SlowMeCopro.sleepTime.set(0);
      }
    }
  });
  table2.close();

  HTU.getAdmin().disableTable(tableDescriptor.getTableName());
  HTU.deleteTable(tableDescriptor.getTableName());

  HTU2.getAdmin().disableTable(tableDescriptor.getTableName());
  HTU2.deleteTable(tableDescriptor.getTableName());

  // We shutdown HTU2 minicluster later, in afterClass(), as shutting down
  // the minicluster has negative impact of deleting all HConnections in JVM.
}

/**
 * Bulk-loads HFiles into a replicated table and verifies the rows are readable both
 * fresh from the primary and stale from a secondary replica.
 */
@Test
public void testBulkLoad() throws IOException {
  // Create table then get the single region for our new table.
  LOG.debug("Creating test table");
  TableDescriptorBuilder builder =
      HTU.createModifyableTableDescriptor(TableName.valueOf("testBulkLoad"),
          ColumnFamilyDescriptorBuilder.DEFAULT_MIN_VERSIONS, 3, HConstants.FOREVER,
          ColumnFamilyDescriptorBuilder.DEFAULT_KEEP_DELETED);
  builder.setRegionReplication(NB_SERVERS);
  builder.setCoprocessor(SlowMeCopro.class.getName());
  TableDescriptor hdt = builder.build();
  Table table = HTU.createTable(hdt, new byte[][] { f }, null);

  // create hfiles to load.
  LOG.debug("Creating test data");
  Path dir = HTU.getDataTestDirOnTestFS("testBulkLoad");
  final int numRows = 10;
  final byte[] qual = Bytes.toBytes("qual");
  final byte[] val = Bytes.toBytes("val");
  Map<byte[], List<Path>> family2Files = new TreeMap<>(Bytes.BYTES_COMPARATOR);
  for (ColumnFamilyDescriptor col : hdt.getColumnFamilies()) {
    Path hfile = new Path(dir, col.getNameAsString());
    TestHRegionServerBulkLoad.createHFile(HTU.getTestFileSystem(), hfile, col.getName(), qual,
        val, numRows);
    family2Files.put(col.getName(), Collections.singletonList(hfile));
  }

  // bulk load HFiles
  LOG.debug("Loading test data");
  BulkLoadHFiles.create(HTU.getConfiguration()).bulkLoad(hdt.getTableName(), family2Files);

  // verify we can read them from the primary
  LOG.debug("Verifying data load");
  for (int i = 0; i < numRows; i++) {
    byte[] row = TestHRegionServerBulkLoad.rowkey(i);
    Get g = new Get(row);
    Result r = table.get(g);
    Assert.assertFalse(r.isStale());
  }

  // verify we can read them from the replica
  LOG.debug("Verifying replica queries");
  try {
    // Block the primary so every TIMELINE Get below is answered by a secondary.
    SlowMeCopro.cdl.set(new CountDownLatch(1));
    for (int i = 0; i < numRows; i++) {
      byte[] row = TestHRegionServerBulkLoad.rowkey(i);
      Get g = new Get(row);
      g.setConsistency(Consistency.TIMELINE);
      Result r = table.get(g);
      Assert.assertTrue(r.isStale());
    }
    SlowMeCopro.cdl.get().countDown();
  } finally {
    SlowMeCopro.cdl.get().countDown();
    SlowMeCopro.sleepTime.set(0);
  }

  HTU.getAdmin().disableTable(hdt.getTableName());
  HTU.deleteTable(hdt.getTableName());
}

/**
 * With RegionServerStoppedCopro failing the primary (and replica 1), verifies that a
 * TIMELINE Get still succeeds from a surviving replica and is reported stale.
 */
@Test
public void testReplicaGetWithPrimaryDown() throws IOException {
  // Create table then get the single region for our new table.
  // NOTE(review): the table name reuses "testCreateDeleteTable" — looks like a
  // copy/paste leftover; confirm it should not be this test's own name.
  TableDescriptorBuilder builder =
      HTU.createModifyableTableDescriptor(TableName.valueOf("testCreateDeleteTable"),
          ColumnFamilyDescriptorBuilder.DEFAULT_MIN_VERSIONS, 3, HConstants.FOREVER,
          ColumnFamilyDescriptorBuilder.DEFAULT_KEEP_DELETED);
  builder.setRegionReplication(NB_SERVERS);
  builder.setCoprocessor(RegionServerStoppedCopro.class.getName());
  TableDescriptor hdt = builder.build();
  try {
    Table table = HTU.createTable(hdt, new byte[][] { f }, null);

    Put p = new Put(row);
    p.addColumn(f, row, row);
    table.put(p);

    // Flush so it can be picked by the replica refresher thread
    HTU.flush(table.getName());

    // Sleep for some time until data is picked up by replicas
    try {
      Thread.sleep(2 * REFRESH_PERIOD);
    } catch (InterruptedException e1) {
      LOG.error(e1.toString(), e1);
    }

    // But if we ask for stale we will get it
    Get g = new Get(row);
    g.setConsistency(Consistency.TIMELINE);
    Result r = table.get(g);
    Assert.assertTrue(r.isStale());
  } finally {
    HTU.getAdmin().disableTable(hdt.getTableName());
    HTU.deleteTable(hdt.getTableName());
  }
}

/**
 * Same as {@link #testReplicaGetWithPrimaryDown()} but exercising the scan path: the
 * primary (and replica 1) throw, and a TIMELINE scan is served stale from a replica.
 */
@Test
public void testReplicaScanWithPrimaryDown() throws IOException {
  // Create table then get the single region for our new table.
  // NOTE(review): table name reuses "testCreateDeleteTable" — see note in the Get test.
  TableDescriptorBuilder builder =
      HTU.createModifyableTableDescriptor(TableName.valueOf("testCreateDeleteTable"),
          ColumnFamilyDescriptorBuilder.DEFAULT_MIN_VERSIONS, 3, HConstants.FOREVER,
          ColumnFamilyDescriptorBuilder.DEFAULT_KEEP_DELETED);
  builder.setRegionReplication(NB_SERVERS);
  builder.setCoprocessor(RegionServerStoppedCopro.class.getName());
  TableDescriptor hdt = builder.build();
  try {
    Table table = HTU.createTable(hdt, new byte[][] { f }, null);

    Put p = new Put(row);
    p.addColumn(f, row, row);
    table.put(p);

    // Flush so it can be picked by the replica refresher thread
    HTU.flush(table.getName());

    // Sleep for some time until data is picked up by replicas
    try {
      Thread.sleep(2 * REFRESH_PERIOD);
    } catch (InterruptedException e1) {
      LOG.error(e1.toString(), e1);
    }

    // But if we ask for stale we will get it
    // Instantiating the Scan class
    Scan scan = new Scan();
    // Scanning the required columns
    scan.addFamily(f);
    scan.setConsistency(Consistency.TIMELINE);

    // Getting the scan result
    // NOTE(review): the ResultScanner is never closed — resource leak in this test.
    ResultScanner scanner = table.getScanner(scan);
    Result r = scanner.next();
    Assert.assertTrue(r.isStale());
  } finally {
    HTU.getAdmin().disableTable(hdt.getTableName());
    HTU.deleteTable(hdt.getTableName());
  }
}

/**
 * Verifies stale replica Gets through the AsyncRpcClient implementation; the RPC client
 * settings are applied to HTU's configuration and reverted in the finally block.
 */
@Test
public void testReplicaGetWithAsyncRpcClientImpl() throws IOException {
  HTU.getConfiguration().setBoolean("hbase.ipc.client.specificThreadForWriting", true);
  HTU.getConfiguration().set("hbase.rpc.client.impl",
      "org.apache.hadoop.hbase.ipc.AsyncRpcClient");
  // Create table then get the single region for our new table.
  TableDescriptorBuilder builder = HTU.createModifyableTableDescriptor(
      TableName.valueOf("testReplicaGetWithAsyncRpcClientImpl"),
      ColumnFamilyDescriptorBuilder.DEFAULT_MIN_VERSIONS, 3, HConstants.FOREVER,
      ColumnFamilyDescriptorBuilder.DEFAULT_KEEP_DELETED);
  builder.setRegionReplication(NB_SERVERS);
  builder.setCoprocessor(SlowMeCopro.class.getName());
  TableDescriptor hdt = builder.build();
  try {
    Table table = HTU.createTable(hdt, new byte[][] { f }, null);

    Put p = new Put(row);
    p.addColumn(f, row, row);
    table.put(p);

    // Flush so it can be picked by the replica refresher thread
    HTU.flush(table.getName());

    // Sleep for some time until data is picked up by replicas
    try {
      Thread.sleep(2 * REFRESH_PERIOD);
    } catch (InterruptedException e1) {
      LOG.error(e1.toString(), e1);
    }

    try {
      // Create the new connection so new config can kick in
      // NOTE(review): 'connection' and 't' are never closed — resource leak.
      Connection connection = ConnectionFactory.createConnection(HTU.getConfiguration());
      Table t = connection.getTable(hdt.getTableName());

      // But if we ask for stale we will get it
      SlowMeCopro.cdl.set(new CountDownLatch(1));
      Get g = new Get(row);
      g.setConsistency(Consistency.TIMELINE);
      Result r = t.get(g);
      Assert.assertTrue(r.isStale());
      SlowMeCopro.cdl.get().countDown();
    } finally {
      SlowMeCopro.cdl.get().countDown();
      SlowMeCopro.sleepTime.set(0);
    }
  } finally {
    // Revert the RPC client overrides so later tests see the default configuration.
    HTU.getConfiguration().unset("hbase.ipc.client.specificThreadForWriting");
    HTU.getConfiguration().unset("hbase.rpc.client.impl");
    HTU.getAdmin().disableTable(hdt.getTableName());
    HTU.deleteTable(hdt.getTableName());
  }
}
}
/** */ package com.specmate.migration.test.objectadded.testmodel.base.util; import com.specmate.migration.test.objectadded.testmodel.base.*; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.EPackage; import org.eclipse.emf.ecore.util.Switch; /** * <!-- begin-user-doc --> * The <b>Switch</b> for the model's inheritance hierarchy. * It supports the call {@link #doSwitch(EObject) doSwitch(object)} * to invoke the <code>caseXXX</code> method for each class of the model, * starting with the actual class of the object * and proceeding up the inheritance hierarchy * until a non-null result is returned, * which is the result of the switch. * <!-- end-user-doc --> * @see com.specmate.migration.test.objectadded.testmodel.base.BasePackage * @generated */ public class BaseSwitch<T> extends Switch<T> { /** * The cached model package * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected static BasePackage modelPackage; /** * Creates an instance of the switch. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public BaseSwitch() { if (modelPackage == null) { modelPackage = BasePackage.eINSTANCE; } } /** * Checks whether this is a switch for the given package. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param ePackage the package in question. * @return whether this is a switch for the given package. * @generated */ @Override protected boolean isSwitchFor(EPackage ePackage) { return ePackage == modelPackage; } /** * Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the first non-null result returned by a <code>caseXXX</code> call. 
* @generated */ @Override protected T doSwitch(int classifierID, EObject theEObject) { switch (classifierID) { case BasePackage.IID: { IID iid = (IID)theEObject; T result = caseIID(iid); if (result == null) result = defaultCase(theEObject); return result; } case BasePackage.ICONTENT_ELEMENT: { IContentElement iContentElement = (IContentElement)theEObject; T result = caseIContentElement(iContentElement); if (result == null) result = caseIID(iContentElement); if (result == null) result = defaultCase(theEObject); return result; } case BasePackage.ICONTAINER: { IContainer iContainer = (IContainer)theEObject; T result = caseIContainer(iContainer); if (result == null) result = caseIContentElement(iContainer); if (result == null) result = caseIID(iContainer); if (result == null) result = defaultCase(theEObject); return result; } case BasePackage.FOLDER: { Folder folder = (Folder)theEObject; T result = caseFolder(folder); if (result == null) result = caseIContainer(folder); if (result == null) result = caseIContentElement(folder); if (result == null) result = caseIID(folder); if (result == null) result = defaultCase(theEObject); return result; } case BasePackage.ITESTABLE: { ITestable iTestable = (ITestable)theEObject; T result = caseITestable(iTestable); if (result == null) result = defaultCase(theEObject); return result; } case BasePackage.IMODIFIABLE: { IModifiable iModifiable = (IModifiable)theEObject; T result = caseIModifiable(iModifiable); if (result == null) result = caseITestable(iModifiable); if (result == null) result = defaultCase(theEObject); return result; } default: return defaultCase(theEObject); } } /** * Returns the result of interpreting the object as an instance of '<em>IID</em>'. * <!-- begin-user-doc --> * This implementation returns null; * returning a non-null result will terminate the switch. * <!-- end-user-doc --> * @param object the target of the switch. * @return the result of interpreting the object as an instance of '<em>IID</em>'. 
* @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject) * @generated */ public T caseIID(IID object) { return null; } /** * Returns the result of interpreting the object as an instance of '<em>IContent Element</em>'. * <!-- begin-user-doc --> * This implementation returns null; * returning a non-null result will terminate the switch. * <!-- end-user-doc --> * @param object the target of the switch. * @return the result of interpreting the object as an instance of '<em>IContent Element</em>'. * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject) * @generated */ public T caseIContentElement(IContentElement object) { return null; } /** * Returns the result of interpreting the object as an instance of '<em>IContainer</em>'. * <!-- begin-user-doc --> * This implementation returns null; * returning a non-null result will terminate the switch. * <!-- end-user-doc --> * @param object the target of the switch. * @return the result of interpreting the object as an instance of '<em>IContainer</em>'. * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject) * @generated */ public T caseIContainer(IContainer object) { return null; } /** * Returns the result of interpreting the object as an instance of '<em>Folder</em>'. * <!-- begin-user-doc --> * This implementation returns null; * returning a non-null result will terminate the switch. * <!-- end-user-doc --> * @param object the target of the switch. * @return the result of interpreting the object as an instance of '<em>Folder</em>'. * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject) * @generated */ public T caseFolder(Folder object) { return null; } /** * Returns the result of interpreting the object as an instance of '<em>ITestable</em>'. * <!-- begin-user-doc --> * This implementation returns null; * returning a non-null result will terminate the switch. * <!-- end-user-doc --> * @param object the target of the switch. 
* @return the result of interpreting the object as an instance of '<em>ITestable</em>'. * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject) * @generated */ public T caseITestable(ITestable object) { return null; } /** * Returns the result of interpreting the object as an instance of '<em>IModifiable</em>'. * <!-- begin-user-doc --> * This implementation returns null; * returning a non-null result will terminate the switch. * <!-- end-user-doc --> * @param object the target of the switch. * @return the result of interpreting the object as an instance of '<em>IModifiable</em>'. * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject) * @generated */ public T caseIModifiable(IModifiable object) { return null; } /** * Returns the result of interpreting the object as an instance of '<em>EObject</em>'. * <!-- begin-user-doc --> * This implementation returns null; * returning a non-null result will terminate the switch, but this is the last case anyway. * <!-- end-user-doc --> * @param object the target of the switch. * @return the result of interpreting the object as an instance of '<em>EObject</em>'. * @see #doSwitch(org.eclipse.emf.ecore.EObject) * @generated */ @Override public T defaultCase(EObject object) { return null; } } //BaseSwitch
/* * Copyright (C) 2016 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.google.cloud.dataflow.sdk.runners.inprocess; import com.google.auto.value.AutoValue; import com.google.cloud.dataflow.sdk.Pipeline; import com.google.cloud.dataflow.sdk.runners.inprocess.InMemoryWatermarkManager.FiredTimers; import com.google.cloud.dataflow.sdk.runners.inprocess.InProcessPipelineRunner.CommittedBundle; import com.google.cloud.dataflow.sdk.transforms.AppliedPTransform; import com.google.cloud.dataflow.sdk.transforms.PTransform; import com.google.cloud.dataflow.sdk.util.KeyedWorkItem; import com.google.cloud.dataflow.sdk.util.KeyedWorkItems; import com.google.cloud.dataflow.sdk.util.TimeDomain; import com.google.cloud.dataflow.sdk.util.TimerInternals.TimerData; import com.google.cloud.dataflow.sdk.util.WindowedValue; import com.google.cloud.dataflow.sdk.values.PCollection; import com.google.cloud.dataflow.sdk.values.PValue; import com.google.common.base.MoreObjects; import com.google.common.base.Optional; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import org.joda.time.Instant; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Collection; import java.util.Collections; import java.util.Map; import java.util.Queue; import java.util.Set; import 
java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; import javax.annotation.Nullable; /** * An {@link InProcessExecutor} that uses an underlying {@link ExecutorService} and * {@link InProcessEvaluationContext} to execute a {@link Pipeline}. */ final class ExecutorServiceParallelExecutor implements InProcessExecutor { private static final Logger LOG = LoggerFactory.getLogger(ExecutorServiceParallelExecutor.class); private final ExecutorService executorService; private final Map<PValue, Collection<AppliedPTransform<?, ?, ?>>> valueToConsumers; private final Set<PValue> keyedPValues; private final TransformEvaluatorRegistry registry; @SuppressWarnings("rawtypes") private final Map<Class<? extends PTransform>, Collection<ModelEnforcementFactory>> transformEnforcements; private final InProcessEvaluationContext evaluationContext; private final LoadingCache<StepAndKey, TransformExecutorService> executorServices; private final Queue<ExecutorUpdate> allUpdates; private final BlockingQueue<VisibleExecutorUpdate> visibleUpdates; private final TransformExecutorService parallelExecutorService; private final CompletionCallback defaultCompletionCallback; private Collection<AppliedPTransform<?, ?, ?>> rootNodes; public static ExecutorServiceParallelExecutor create( ExecutorService executorService, Map<PValue, Collection<AppliedPTransform<?, ?, ?>>> valueToConsumers, Set<PValue> keyedPValues, TransformEvaluatorRegistry registry, @SuppressWarnings("rawtypes") Map<Class<? 
extends PTransform>, Collection<ModelEnforcementFactory>> transformEnforcements, InProcessEvaluationContext context) { return new ExecutorServiceParallelExecutor( executorService, valueToConsumers, keyedPValues, registry, transformEnforcements, context); } private ExecutorServiceParallelExecutor( ExecutorService executorService, Map<PValue, Collection<AppliedPTransform<?, ?, ?>>> valueToConsumers, Set<PValue> keyedPValues, TransformEvaluatorRegistry registry, @SuppressWarnings("rawtypes") Map<Class<? extends PTransform>, Collection<ModelEnforcementFactory>> transformEnforcements, InProcessEvaluationContext context) { this.executorService = executorService; this.valueToConsumers = valueToConsumers; this.keyedPValues = keyedPValues; this.registry = registry; this.transformEnforcements = transformEnforcements; this.evaluationContext = context; // Weak Values allows TransformExecutorServices that are no longer in use to be reclaimed. // Executing TransformExecutorServices have a strong reference to their TransformExecutorService // which stops the TransformExecutorServices from being prematurely garbage collected executorServices = CacheBuilder.newBuilder().weakValues().build(serialTransformExecutorServiceCacheLoader()); this.allUpdates = new ConcurrentLinkedQueue<>(); this.visibleUpdates = new ArrayBlockingQueue<>(20); parallelExecutorService = TransformExecutorServices.parallel(executorService); defaultCompletionCallback = new DefaultCompletionCallback(); } private CacheLoader<StepAndKey, TransformExecutorService> serialTransformExecutorServiceCacheLoader() { return new CacheLoader<StepAndKey, TransformExecutorService>() { @Override public TransformExecutorService load(StepAndKey stepAndKey) throws Exception { return TransformExecutorServices.serial(executorService); } }; } @Override public void start(Collection<AppliedPTransform<?, ?, ?>> roots) { rootNodes = ImmutableList.copyOf(roots); Runnable monitorRunnable = new MonitorRunnable(); 
executorService.submit(monitorRunnable); } @SuppressWarnings("unchecked") public void scheduleConsumption( AppliedPTransform<?, ?, ?> consumer, @Nullable CommittedBundle<?> bundle, CompletionCallback onComplete) { evaluateBundle(consumer, bundle, onComplete); } private <T> void evaluateBundle( final AppliedPTransform<?, ?, ?> transform, @Nullable final CommittedBundle<T> bundle, final CompletionCallback onComplete) { TransformExecutorService transformExecutor; if (bundle != null && isKeyed(bundle.getPCollection())) { final StepAndKey stepAndKey = StepAndKey.of(transform, bundle == null ? null : bundle.getKey()); // This executor will remain reachable until it has executed all scheduled transforms. // The TransformExecutors keep a strong reference to the Executor, the ExecutorService keeps // a reference to the scheduled TransformExecutor callable. Follow-up TransformExecutors // (scheduled due to the completion of another TransformExecutor) are provided to the // ExecutorService before the Earlier TransformExecutor callable completes. 
transformExecutor = executorServices.getUnchecked(stepAndKey); } else { transformExecutor = parallelExecutorService; } Collection<ModelEnforcementFactory> enforcements = MoreObjects.firstNonNull( transformEnforcements.get(transform.getTransform().getClass()), Collections.<ModelEnforcementFactory>emptyList()); TransformExecutor<T> callable = TransformExecutor.create( registry, enforcements, evaluationContext, bundle, transform, onComplete, transformExecutor); transformExecutor.schedule(callable); } private boolean isKeyed(PValue pvalue) { return keyedPValues.contains(pvalue); } private void scheduleConsumers(ExecutorUpdate update) { CommittedBundle<?> bundle = update.getBundle().get(); for (AppliedPTransform<?, ?, ?> consumer : update.getConsumers()) { scheduleConsumption(consumer, bundle, defaultCompletionCallback); } } @Override public void awaitCompletion() throws Throwable { VisibleExecutorUpdate update; do { update = visibleUpdates.take(); if (update.throwable.isPresent()) { throw update.throwable.get(); } } while (!update.isDone()); executorService.shutdown(); } /** * The base implementation of {@link CompletionCallback} that provides implementations for * {@link #handleResult(CommittedBundle, InProcessTransformResult)} and * {@link #handleThrowable(CommittedBundle, Throwable)}, given an implementation of * {@link #getCommittedResult(CommittedBundle, InProcessTransformResult)}. 
*/ private abstract class CompletionCallbackBase implements CompletionCallback { protected abstract CommittedResult getCommittedResult( CommittedBundle<?> inputBundle, InProcessTransformResult result); @Override public final CommittedResult handleResult( CommittedBundle<?> inputBundle, InProcessTransformResult result) { CommittedResult committedResult = getCommittedResult(inputBundle, result); for (CommittedBundle<?> outputBundle : committedResult.getOutputs()) { allUpdates.offer(ExecutorUpdate.fromBundle(outputBundle, valueToConsumers.get(outputBundle.getPCollection()))); } CommittedBundle<?> unprocessedInputs = committedResult.getUnprocessedInputs(); if (unprocessedInputs != null && !Iterables.isEmpty(unprocessedInputs.getElements())) { allUpdates.offer(ExecutorUpdate.fromBundle(unprocessedInputs, Collections.<AppliedPTransform<?, ?, ?>>singleton(committedResult.getTransform()))); } return committedResult; } @Override public final void handleThrowable(CommittedBundle<?> inputBundle, Throwable t) { allUpdates.offer(ExecutorUpdate.fromThrowable(t)); } } /** * The default {@link CompletionCallback}. The default completion callback is used to complete * transform evaluations that are triggered due to the arrival of elements from an upstream * transform, or for a source transform. */ private class DefaultCompletionCallback extends CompletionCallbackBase { @Override public CommittedResult getCommittedResult( CommittedBundle<?> inputBundle, InProcessTransformResult result) { return evaluationContext.handleResult(inputBundle, Collections.<TimerData>emptyList(), result); } } /** * A {@link CompletionCallback} where the completed bundle was produced to deliver some collection * of {@link TimerData timers}. When the evaluator completes successfully, reports all of the * timers used to create the input to the {@link InProcessEvaluationContext evaluation context} * as part of the result. 
*/
private class TimerCompletionCallback extends CompletionCallbackBase {
    private final Iterable<TimerData> timers;

    private TimerCompletionCallback(Iterable<TimerData> timers) {
      this.timers = timers;
    }

    @Override
    public CommittedResult getCommittedResult(
        CommittedBundle<?> inputBundle, InProcessTransformResult result) {
      // Report the timers that triggered this evaluation along with the result.
      return evaluationContext.handleResult(inputBundle, timers, result);
    }
  }

  /**
   * An internal status update on the state of the executor.
   *
   * Used to signal when the executor should be shut down (due to an exception).
   */
  @AutoValue
  abstract static class ExecutorUpdate {
    public static ExecutorUpdate fromBundle(
        CommittedBundle<?> bundle, Collection<AppliedPTransform<?, ?, ?>> consumers) {
      return new AutoValue_ExecutorServiceParallelExecutor_ExecutorUpdate(
          Optional.of(bundle), consumers, Optional.<Throwable>absent());
    }

    public static ExecutorUpdate fromThrowable(Throwable t) {
      return new AutoValue_ExecutorServiceParallelExecutor_ExecutorUpdate(
          Optional.<CommittedBundle<?>>absent(),
          Collections.<AppliedPTransform<?, ?, ?>>emptyList(),
          Optional.of(t));
    }

    /**
     * Returns the bundle that produced this update.
     */
    public abstract Optional<? extends CommittedBundle<?>> getBundle();

    /**
     * Returns the transforms to process the bundle. If nonempty, {@link #getBundle()} will return
     * a present {@link Optional}.
     */
    public abstract Collection<AppliedPTransform<?, ?, ?>> getConsumers();

    public abstract Optional<? extends Throwable> getException();
  }

  /**
   * An update of interest to the user. Used in {@link #awaitCompletion} to decide whether to
   * return normally or throw an exception.
   */
  private static class VisibleExecutorUpdate {
    private final Optional<? extends Throwable> throwable;
    private final boolean done;

    public static VisibleExecutorUpdate fromThrowable(Throwable e) {
      return new VisibleExecutorUpdate(false, e);
    }

    public static VisibleExecutorUpdate finished() {
      return new VisibleExecutorUpdate(true, null);
    }

    private VisibleExecutorUpdate(boolean done, @Nullable Throwable exception) {
      this.throwable = Optional.fromNullable(exception);
      this.done = done;
    }

    public boolean isDone() {
      return done;
    }
  }

  /**
   * Periodic housekeeping task: drains pending {@link ExecutorUpdate updates}, fires ready
   * timers, and re-seeds work from root transforms. Reschedules itself unless the pipeline
   * is done.
   */
  private class MonitorRunnable implements Runnable {
    // arbitrary termination condition to ensure progress in the presence of pushback
    private final long maxTimeProcessingUpdatesNanos = TimeUnit.MILLISECONDS.toNanos(5L);

    private final String runnableName = String.format("%s$%s-monitor",
        evaluationContext.getPipelineOptions().getAppName(),
        ExecutorServiceParallelExecutor.class.getSimpleName());

    @Override
    public void run() {
      String oldName = Thread.currentThread().getName();
      Thread.currentThread().setName(runnableName);
      try {
        ExecutorUpdate update = allUpdates.poll();
        // pull all of the pending work off of the queue
        long updatesStart = System.nanoTime();
        while (update != null) {
          LOG.debug("Executor Update: {}", update);
          if (update.getBundle().isPresent()) {
            scheduleConsumers(update);
          } else if (update.getException().isPresent()) {
            visibleUpdates.offer(
                VisibleExecutorUpdate.fromThrowable(update.getException().get()));
          }
          // Bound time spent draining so timers and root work still make progress.
          if (System.nanoTime() - updatesStart > maxTimeProcessingUpdatesNanos) {
            break;
          } else {
            update = allUpdates.poll();
          }
        }
        boolean timersFired = fireTimers();
        addWorkIfNecessary(timersFired);
      } catch (InterruptedException e) {
        // Re-assert the interrupt and make the failure visible to awaitCompletion().
        Thread.currentThread().interrupt();
        LOG.error("Monitor died due to being interrupted");
        while (!visibleUpdates.offer(VisibleExecutorUpdate.fromThrowable(e))) {
          visibleUpdates.poll();
        }
      } catch (Throwable t) {
        LOG.error("Monitor thread died due to throwable", t);
        while (!visibleUpdates.offer(VisibleExecutorUpdate.fromThrowable(t))) {
          visibleUpdates.poll();
        }
      } finally {
        if (!shouldShutdown()) {
          // The monitor thread should always be scheduled; but we only need to be scheduled once
          executorService.submit(this);
        }
        Thread.currentThread().setName(oldName);
      }
    }

    /**
     * Fires any available timers. Returns true if at least one timer was fired.
     */
    private boolean fireTimers() throws Exception {
      try {
        boolean firedTimers = false;
        for (Map.Entry<AppliedPTransform<?, ?, ?>, Map<Object, FiredTimers>> transformTimers :
            evaluationContext.extractFiredTimers().entrySet()) {
          AppliedPTransform<?, ?, ?> transform = transformTimers.getKey();
          for (Map.Entry<Object, FiredTimers> keyTimers :
              transformTimers.getValue().entrySet()) {
            for (TimeDomain domain : TimeDomain.values()) {
              Collection<TimerData> delivery = keyTimers.getValue().getTimers(domain);
              if (delivery.isEmpty()) {
                continue;
              }
              KeyedWorkItem<Object, Object> work =
                  KeyedWorkItems.timersWorkItem(keyTimers.getKey(), delivery);
              @SuppressWarnings({"unchecked", "rawtypes"})
              CommittedBundle<?> bundle = evaluationContext
                  .createKeyedBundle(
                      null, keyTimers.getKey(), (PCollection) transform.getInput())
                  .add(WindowedValue.valueInEmptyWindows(work))
                  .commit(Instant.now());
              scheduleConsumption(transform, bundle, new TimerCompletionCallback(delivery));
              firedTimers = true;
            }
          }
        }
        return firedTimers;
      } catch (Exception e) {
        LOG.error("Internal Error while delivering timers", e);
        throw e;
      }
    }

    private boolean shouldShutdown() {
      if (evaluationContext.isDone()) {
        // Fixed: message previously ended in a dangling "{}" placeholder with no argument.
        LOG.debug("Pipeline is finished. Shutting down.");
        while (!visibleUpdates.offer(VisibleExecutorUpdate.finished())) {
          visibleUpdates.poll();
        }
        executorService.shutdown();
        return true;
      }
      return false;
    }

    /**
     * If all active {@link TransformExecutor TransformExecutors} are in a blocked state,
     * add more work from root nodes that may have additional work. This ensures that if a pipeline
     * has elements available from the root nodes it will add those elements when necessary.
     */
    private void addWorkIfNecessary(boolean firedTimers) {
      // If any timers have fired, they will add more work; We don't need to add more
      if (firedTimers) {
        return;
      }
      // All current TransformExecutors are blocked; add more work from the roots.
      for (AppliedPTransform<?, ?, ?> root : rootNodes) {
        if (!evaluationContext.isDone(root)) {
          scheduleConsumption(root, null, defaultCompletionCallback);
        }
      }
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.twill.internal; import com.google.common.base.Charsets; import com.google.common.collect.ImmutableList; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.gson.Gson; import com.google.gson.JsonElement; import org.apache.twill.api.LocalFile; import org.apache.twill.api.RunId; import org.apache.twill.api.RuntimeSpecification; import org.apache.twill.filesystem.Location; import org.apache.twill.internal.state.Message; import org.apache.twill.launcher.FindFreePort; import org.apache.twill.launcher.TwillLauncher; import org.apache.twill.zookeeper.NodeData; import org.apache.twill.zookeeper.ZKClient; import org.apache.twill.zookeeper.ZKOperations; import org.apache.zookeeper.KeeperException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.List; /** * This class helps launching a container. 
*/ public final class TwillContainerLauncher { private static final Logger LOG = LoggerFactory.getLogger(TwillContainerLauncher.class); private static final double HEAP_MIN_RATIO = 0.7d; private final RuntimeSpecification runtimeSpec; private final ProcessLauncher.PrepareLaunchContext launchContext; private final ZKClient zkClient; private final int instanceCount; private final JvmOptions jvmOpts; private final int reservedMemory; private final Location secureStoreLocation; public TwillContainerLauncher(RuntimeSpecification runtimeSpec, ProcessLauncher.PrepareLaunchContext launchContext, ZKClient zkClient, int instanceCount, JvmOptions jvmOpts, int reservedMemory, Location secureStoreLocation) { this.runtimeSpec = runtimeSpec; this.launchContext = launchContext; this.zkClient = zkClient; this.instanceCount = instanceCount; this.jvmOpts = jvmOpts; this.reservedMemory = reservedMemory; this.secureStoreLocation = secureStoreLocation; } public TwillContainerController start(RunId runId, int instanceId, Class<?> mainClass, String classPath) { ProcessLauncher.PrepareLaunchContext.AfterResources afterResources = null; ProcessLauncher.PrepareLaunchContext.ResourcesAdder resourcesAdder = null; // Clean up zookeeper path in case this is a retry and there are old messages and state there. Futures.getUnchecked(ZKOperations.ignoreError( ZKOperations.recursiveDelete(zkClient, "/" + runId), KeeperException.NoNodeException.class, null)); // Adds all file to be localized to container if (!runtimeSpec.getLocalFiles().isEmpty()) { resourcesAdder = launchContext.withResources(); for (LocalFile localFile : runtimeSpec.getLocalFiles()) { afterResources = resourcesAdder.add(localFile); } } // Optionally localize secure store. 
try { if (secureStoreLocation != null && secureStoreLocation.exists()) { if (resourcesAdder == null) { resourcesAdder = launchContext.withResources(); } afterResources = resourcesAdder.add(new DefaultLocalFile(Constants.Files.CREDENTIALS, secureStoreLocation.toURI(), secureStoreLocation.lastModified(), secureStoreLocation.length(), false, null)); } } catch (IOException e) { LOG.warn("Failed to launch container with secure store {}.", secureStoreLocation.toURI()); } if (afterResources == null) { afterResources = launchContext.noResources(); } int memory = runtimeSpec.getResourceSpecification().getMemorySize(); if (((double) (memory - reservedMemory) / memory) >= HEAP_MIN_RATIO) { // Reduce -Xmx by the reserved memory size. memory = runtimeSpec.getResourceSpecification().getMemorySize() - reservedMemory; } else { // If it is a small VM, just discount it by the min ratio. memory = (int) Math.ceil(memory * HEAP_MIN_RATIO); } // Currently no reporting is supported for runnable containers ProcessLauncher.PrepareLaunchContext.MoreEnvironment afterEnvironment = afterResources .withEnvironment() .add(EnvKeys.TWILL_RUN_ID, runId.getId()) .add(EnvKeys.TWILL_RUNNABLE_NAME, runtimeSpec.getName()) .add(EnvKeys.TWILL_INSTANCE_ID, Integer.toString(instanceId)) .add(EnvKeys.TWILL_INSTANCE_COUNT, Integer.toString(instanceCount)); // assemble the command based on jvm options ImmutableList.Builder<String> commandBuilder = ImmutableList.builder(); String firstCommand; if (jvmOpts.getDebugOptions().doDebug(runtimeSpec.getName())) { // for debugging we run a quick Java program to find a free port, then pass that port as the debug port and also // as a System property to the runnable (Java has no general way to determine the port from within the JVM). // PORT=$(java FindFreePort) && java -agentlib:jdwp=...,address=\$PORT -Dtwill.debug.port=\$PORT... TwillLauncher // The $ must be escaped, otherwise it gets expanded (to "") before the command is submitted. 
String suspend = jvmOpts.getDebugOptions().doSuspend() ? "y" : "n"; firstCommand = "TWILL_DEBUG_PORT=$($JAVA_HOME/bin/java"; commandBuilder.add("-cp", Constants.Files.LAUNCHER_JAR, FindFreePort.class.getName() + ")", "&&", // this will stop if FindFreePort fails "$JAVA_HOME/bin/java", "-agentlib:jdwp=transport=dt_socket,server=y,suspend=" + suspend + "," + "address=\\$TWILL_DEBUG_PORT", "-Dtwill.debug.port=\\$TWILL_DEBUG_PORT" ); } else { firstCommand = "$JAVA_HOME/bin/java"; } commandBuilder.add("-Djava.io.tmpdir=tmp", "-Dyarn.container=$" + EnvKeys.YARN_CONTAINER_ID, "-Dtwill.runnable=$" + EnvKeys.TWILL_APP_NAME + ".$" + EnvKeys.TWILL_RUNNABLE_NAME, "-cp", Constants.Files.LAUNCHER_JAR + ":" + classPath, "-Xmx" + memory + "m"); if (jvmOpts.getExtraOptions() != null) { commandBuilder.add(jvmOpts.getExtraOptions()); } commandBuilder.add(TwillLauncher.class.getName(), Constants.Files.CONTAINER_JAR, mainClass.getName(), Boolean.TRUE.toString()); List<String> command = commandBuilder.build(); ProcessController<Void> processController = afterEnvironment .withCommands().add(firstCommand, command.toArray(new String[command.size()])) .redirectOutput(Constants.STDOUT).redirectError(Constants.STDERR) .launch(); TwillContainerControllerImpl controller = new TwillContainerControllerImpl(zkClient, runId, processController); controller.start(); return controller; } private static final class TwillContainerControllerImpl extends AbstractZKServiceController implements TwillContainerController { private final ProcessController<Void> processController; private volatile ContainerLiveNodeData liveData; protected TwillContainerControllerImpl(ZKClient zkClient, RunId runId, ProcessController<Void> processController) { super(runId, zkClient); this.processController = processController; } @Override protected void doStartUp() { // No-op } @Override protected void doShutDown() { // No-op } @Override protected void instanceNodeUpdated(NodeData nodeData) { if (nodeData == null || 
nodeData.getData() == null) { LOG.warn("Instance node was updated but data is null."); return; } try { Gson gson = new Gson(); JsonElement json = gson.fromJson(new String(nodeData.getData(), Charsets.UTF_8), JsonElement.class); if (json.isJsonObject()) { JsonElement data = json.getAsJsonObject().get("data"); if (data != null) { this.liveData = gson.fromJson(data, ContainerLiveNodeData.class); LOG.info("Container LiveNodeData updated: " + new String(nodeData.getData(), Charsets.UTF_8)); } } } catch (Throwable t) { LOG.warn("Error deserializing updated instance node data", t); } } @Override protected void instanceNodeFailed(Throwable cause) { // No-op } @Override public ListenableFuture<Message> sendMessage(Message message) { return sendMessage(message, message); } @Override public synchronized void completed(int exitStatus) { forceShutDown(); } @Override public ContainerLiveNodeData getLiveNodeData() { return liveData; } @Override public void kill() { processController.cancel(); } } }
package ml.temporal;

import static ml.utils.InPlaceOperator.mtimes;
import static ml.utils.InPlaceOperator.operate;
import static ml.utils.InPlaceOperator.timesAssign;
import static ml.utils.Matlab.norm;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;

import la.matrix.DenseMatrix;
import la.matrix.Matrix;
import la.vector.DenseVector;
import ml.optimization.AcceleratedProximalGradient;
import ml.optimization.ProxL1;
import ml.optimization.ProxPlus;
import ml.optimization.ProximalMapping;
import ml.options.Options;
import ml.utils.Matlab;
import ml.utils.Printer;

/**
 * Survival score model fitting a reflected logistic link
 * g = 1 / (1 + exp(rho * (t - x'w))) by alternating accelerated proximal
 * gradient updates of the weights W (L1-regularized) and the steepness rho
 * (constrained nonnegative via ProxPlus).
 */
public class ReflectedLogisticRegression extends SurvivalScoreModel {

    public static void main(String[] args) {

    }

    // Steepness parameter of the logistic link.
    private double rho;

    private Options options;

    public ReflectedLogisticRegression(double lambda) {
        // Fixed: the original assigned options.lambda on a never-initialized
        // options field, which threw a NullPointerException.
        this.options = new Options();
        this.options.lambda = lambda;
    }

    public ReflectedLogisticRegression(Options options) {
        this.options = options;
    }

    public void initialize(double rho0) {
        this.rho = rho0;
    }

    public void initialize(double... params) {
        // With three parameters the middle one is rho; otherwise the first.
        if (params.length == 3) {
            rho = params[1];
        } else {
            rho = params[0];
        }
    }

    /**
     * Fits W and rho by alternating minimization. Each outer iteration first
     * updates W with an L1 proximal step, then updates rho (as the 1x1 matrix
     * Theta) with a nonnegativity proximal step, until the objective change
     * drops below machine epsilon or maxIter outer iterations elapse.
     */
    @Override
    public void train() {
        int ny = Y.getColumnDimension();
        int maxIter = options.maxIter;
        if (Double.isInfinite(rho))
            rho = 1;
        double lambda = this.options.lambda;
        DenseVector weights = new DenseVector(n, 0);
        DenseVector GradWT = new DenseVector(n, 0);
        W = Matlab.ones(p, ny);
        Matrix GradW = W.copy();
        Matrix Theta = Matlab.ones(1, 1);
        Theta.setEntry(0, 0, rho);
        double gradTheta = 0;
        Matrix ThetaGrad = Matlab.ones(1, 1);
        Matrix XW = Matlab.zeros(Matlab.size(Y));
        double gval = 0;
        double hval = 0;

        // Initial loss, residual weights, and gradient w.r.t. W.
        mtimes(XW, X, W);
        gval = 0;
        for (int i = 0; i < n; i++) {
            double mu = XW.getEntry(i, 0);
            double t = T.getEntry(i, 0);
            double g = 1.0 / (1 + Math.exp(rho * (t - mu)));
            double y = Y.getEntry(i, 0);
            double e = g - y;
            gval += e * e;
            weights.set(i, (g - y) * rho * g * (1 - g));
        }
        operate(GradWT, weights, X);
        for (int j = 0; j < p; j++) {
            GradW.setEntry(j, 0, GradWT.get(j));
        }
        timesAssign(GradW, 2.0 / n);
        gval /= n;
        hval = lambda * norm(W, 1);

        ProximalMapping proxL1 = new ProxL1(lambda);
        ProximalMapping proxPlus = new ProxPlus();
        AcceleratedProximalGradient.type = 0;
        boolean[] flags = null;
        double epsilon = 1e-3;
        int k = 0;
        int APGMaxIter = 1000;
        double fval = 0;
        double fval_pre = 0;
        int cnt = 0;
        while (true) {

            // Update W:
            AcceleratedProximalGradient.prox = proxL1;
            while (true) {
                flags = AcceleratedProximalGradient.run(GradW, gval, hval, epsilon, W);
                if (flags[0])
                    break;
                gval = 0;
                mtimes(XW, X, W);
                for (int i = 0; i < n; i++) {
                    double mu = XW.getEntry(i, 0);
                    double t = T.getEntry(i, 0);
                    double g = 1.0 / (1 + Math.exp(rho * (t - mu)));
                    double y = Y.getEntry(i, 0);
                    double e = g - y;
                    gval += e * e;
                    weights.set(i, (g - y) * rho * g * (1 - g));
                }
                gval /= n;
                hval = lambda * norm(W, 1);
                /*
                 * Compute the objective function value, if flags[1] is true
                 * gradient will also be computed.
                 */
                if (flags[1]) {
                    k = k + 1;
                    if (k > APGMaxIter)
                        break;
                    operate(GradWT, weights, X);
                    for (int j = 0; j < p; j++) {
                        GradW.setEntry(j, 0, GradWT.get(j));
                    }
                    timesAssign(GradW, 2.0 / n);
                }
            }

            // Update Theta:
            // We already computed gval for the new W, hval is always 0
            AcceleratedProximalGradient.prox = proxPlus;
            hval = 0;
            gradTheta = 0;
            for (int i = 0; i < n; i++) {
                double mu = XW.getEntry(i, 0);
                double t = T.getEntry(i, 0);
                double g = 1.0 / (1 + Math.exp(rho * (t - mu)));
                double y = Y.getEntry(i, 0);
                gradTheta += (g - y) * (mu - t) * g * (1 - g);
            }
            gradTheta *= 2.0 / n;
            ThetaGrad.setEntry(0, 0, gradTheta);
            while (true) {
                flags = AcceleratedProximalGradient.run(ThetaGrad, gval, hval, epsilon, Theta);
                if (flags[0])
                    break;
                gval = 0;
                gradTheta = 0;
                rho = Theta.getEntry(0, 0);
                for (int i = 0; i < n; i++) {
                    double mu = XW.getEntry(i, 0);
                    double t = T.getEntry(i, 0);
                    double g = 1.0 / (1 + Math.exp(rho * (t - mu)));
                    double y = Y.getEntry(i, 0);
                    double e = g - y;
                    gval += e * e;
                    gradTheta += (g - y) * (mu - t) * g * (1 - g);
                }
                gval /= n;
                /*
                 * Compute the objective function value, if flags[1] is true
                 * gradient will also be computed.
                 */
                if (flags[1]) {
                    k = k + 1;
                    if (k > APGMaxIter)
                        break;
                    gradTheta *= 2.0 / n;
                    ThetaGrad.setEntry(0, 0, gradTheta);
                }
            }

            cnt++;
            fval = gval + lambda * norm(W, 1);
            Printer.fprintf("Iter %d - fval: %.4f\n", cnt, fval);
            // Fixed: the convergence break had been commented out, which made the
            // following assignment conditional on already being converged and
            // disabled the convergence test entirely (only cnt > maxIter could
            // terminate the loop).
            if (cnt > 1 && Math.abs(fval_pre - fval) < Matlab.eps) {
                break;
            }
            fval_pre = fval;
            if (cnt > maxIter)
                break;
        }
    }

    @Override
    public Matrix predict(Matrix Xt, Matrix Tt) {
        Matrix XtW = Xt.mtimes(W);
        int n = Xt.getRowDimension();
        Matrix PredY = new DenseMatrix(n, 1);
        for (int i = 0; i < n; i++) {
            double mu = XtW.getEntry(i, 0);
            double t = Tt.getEntry(i, 0);
            double g = 1.0 / (1 + Math.exp(rho * (t - mu)));
            PredY.setEntry(i, 0, g);
        }
        return PredY;
    }

    @Override
    public void loadModel(String filePath) {
        // System.out.println("Loading regression model...");
        try {
            ObjectInputStream ois = new ObjectInputStream(new FileInputStream(filePath));
            W = (Matrix) ois.readObject();
            rho = ois.readDouble();
            ois.close();
            System.out.println("Model loaded.");
        } catch (FileNotFoundException e) {
            e.printStackTrace();
            System.exit(1);
        } catch (IOException e) {
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void saveModel(String filePath) {
        File parentFile = new File(filePath).getParentFile();
        if (parentFile != null && !parentFile.exists()) {
            parentFile.mkdirs();
        }
        try {
            ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(filePath));
            oos.writeObject(W);
            // Fixed: previously wrote a boxed Double via writeObject while
            // loadModel reads with readDouble(), corrupting the round-trip.
            // writeDouble matches readDouble.
            oos.writeDouble(rho);
            oos.close();
            System.out.println("Model saved.");
        } catch (FileNotFoundException e) {
            e.printStackTrace();
            System.exit(1);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

}
/* The contents of this file are subject to the license and copyright terms * detailed in the license directory at the root of the source tree (also * available online at http://fedora-commons.org/license/). */ package org.fcrepo.server.journal; import org.fcrepo.server.Context; import org.fcrepo.server.errors.GeneralException; import org.fcrepo.server.errors.ModuleInitializationException; import org.fcrepo.server.errors.ModuleShutdownException; import org.fcrepo.server.errors.ServerException; import org.fcrepo.server.journal.entry.CreatorJournalEntry; import org.fcrepo.server.management.ManagementDelegate; import org.fcrepo.server.messaging.PName; import org.fcrepo.server.storage.types.Datastream; import org.fcrepo.server.storage.types.RelationshipTuple; import org.fcrepo.server.storage.types.Validation; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.InputStream; import java.util.Date; import java.util.Map; /** * This is the worker class to use in Journaling mode (normal mode). * <p/> * Each time a "writing" Management method is called, create a * CreatorJournalEntry and ask it to invoke the method on the * ManagementDelegate. If a "read-only" Management method is called, just pass * it along to the ManagementDelegate. * * @author Jim Blake */ public class JournalCreator implements JournalWorker, JournalConstants { private static final Logger logger = LoggerFactory.getLogger(JournalCreator.class); private final JournalWriter writer; private final String role; private ManagementDelegate delegate; /** * Get a JournalWriter to use, based on the server parameters. 
*/
public JournalCreator(Map<String, String> parameters,
                      String role,
                      ServerInterface server)
        throws ModuleInitializationException {
    this.role = role;
    try {
        writer = JournalWriter.getInstance(parameters, role, server);
    } catch (JournalException e) {
        String msg = "Problem creating the JournalWriter";
        logger.error(msg, e);
        throw new ModuleInitializationException(msg, role, e);
    }
}

/**
 * Receive a ManagementDelegate module to perform the Management operations.
 */
public void setManagementDelegate(ManagementDelegate delegate) {
    this.delegate = delegate;
}

/**
 * Server is shutting down, so tell the JournalWriter to shut down.
 */
public void shutdown() throws ModuleShutdownException {
    try {
        writer.shutdown();
    } catch (JournalException e) {
        throw new ModuleShutdownException("JournalWriter generated an error on shutdown()",
                                          role,
                                          e);
    }
}

//
// -------------------------------------------------------------------------
//
// Create a Journal entry for each call to one of the Management API
// "writing" methods.
//
// -------------------------------------------------------------------------
//

/**
 * Let the delegate do it, and then write a journal entry.
 */
public String ingest(Context context,
                     InputStream serialization,
                     String logMessage,
                     String format,
                     String encoding,
                     String pid) throws ServerException {
    try {
        // Record the call and its arguments, then invoke it on the delegate.
        CreatorJournalEntry cje =
                new CreatorJournalEntry(METHOD_INGEST, context);
        cje.addArgument(ARGUMENT_NAME_SERIALIZATION, serialization);
        cje.addArgument(ARGUMENT_NAME_LOG_MESSAGE, logMessage);
        cje.addArgument(ARGUMENT_NAME_FORMAT, format);
        cje.addArgument(ARGUMENT_NAME_ENCODING, encoding);
        cje.addArgument(ARGUMENT_NAME_NEW_PID, pid);
        return (String) cje.invokeAndClose(delegate, writer);
    } catch (JournalException e) {
        throw new GeneralException("Problem creating the Journal", e);
    }
}

/**
 * Create a journal entry, add the arguments, and invoke the method.
*/
public Date modifyObject(Context context,
                         String pid,
                         String state,
                         String label,
                         String ownerId,
                         String logMessage,
                         Date lastModifiedDate) throws ServerException {
    try {
        CreatorJournalEntry cje =
                new CreatorJournalEntry(METHOD_MODIFY_OBJECT, context);
        cje.addArgument(ARGUMENT_NAME_PID, pid);
        cje.addArgument(ARGUMENT_NAME_STATE, state);
        cje.addArgument(ARGUMENT_NAME_LABEL, label);
        cje.addArgument(ARGUMENT_NAME_OWNERID, ownerId);
        cje.addArgument(ARGUMENT_NAME_LOG_MESSAGE, logMessage);
        cje.addArgument(ARGUMENT_NAME_LAST_MODIFIED_DATE, lastModifiedDate);
        return (Date) cje.invokeAndClose(delegate, writer);
    } catch (JournalException e) {
        throw new GeneralException("Problem creating the Journal", e);
    }
}

/**
 * Create a journal entry, add the arguments, and invoke the method.
 */
public Date purgeObject(Context context, String pid, String logMessage)
        throws ServerException {
    try {
        CreatorJournalEntry cje =
                new CreatorJournalEntry(METHOD_PURGE_OBJECT, context);
        cje.addArgument(ARGUMENT_NAME_PID, pid);
        cje.addArgument(ARGUMENT_NAME_LOG_MESSAGE, logMessage);
        return (Date) cje.invokeAndClose(delegate, writer);
    } catch (JournalException e) {
        throw new GeneralException("Problem creating the Journal", e);
    }
}

/**
 * Create a journal entry, add the arguments, and invoke the method.
*/
public String addDatastream(Context context,
                            String pid,
                            String dsID,
                            String[] altIDs,
                            String dsLabel,
                            boolean versionable,
                            String MIMEType,
                            String formatURI,
                            String location,
                            String controlGroup,
                            String dsState,
                            String checksumType,
                            String checksum,
                            String logMessage) throws ServerException {
    try {
        CreatorJournalEntry cje =
                new CreatorJournalEntry(METHOD_ADD_DATASTREAM, context);
        cje.addArgument(ARGUMENT_NAME_PID, pid);
        cje.addArgument(ARGUMENT_NAME_DS_ID, dsID);
        cje.addArgument(ARGUMENT_NAME_ALT_IDS, altIDs);
        cje.addArgument(ARGUMENT_NAME_DS_LABEL, dsLabel);
        cje.addArgument(ARGUMENT_NAME_VERSIONABLE, versionable);
        cje.addArgument(ARGUMENT_NAME_MIME_TYPE, MIMEType);
        cje.addArgument(ARGUMENT_NAME_FORMAT_URI, formatURI);
        cje.addArgument(ARGUMENT_NAME_LOCATION, location);
        cje.addArgument(ARGUMENT_NAME_CONTROL_GROUP, controlGroup);
        cje.addArgument(ARGUMENT_NAME_DS_STATE, dsState);
        cje.addArgument(ARGUMENT_NAME_CHECKSUM_TYPE, checksumType);
        cje.addArgument(ARGUMENT_NAME_CHECKSUM, checksum);
        cje.addArgument(ARGUMENT_NAME_LOG_MESSAGE, logMessage);
        return (String) cje.invokeAndClose(delegate, writer);
    } catch (JournalException e) {
        throw new GeneralException("Problem creating the Journal", e);
    }
}

/**
 * Create a journal entry, add the arguments, and invoke the method.
*/
public Date modifyDatastreamByValue(Context context,
                                    String pid,
                                    String datastreamID,
                                    String[] altIDs,
                                    String dsLabel,
                                    String mimeType,
                                    String formatURI,
                                    InputStream dsContent,
                                    String checksumType,
                                    String checksum,
                                    String logMessage,
                                    Date lastModifiedDate) throws ServerException {
    try {
        CreatorJournalEntry cje =
                new CreatorJournalEntry(METHOD_MODIFY_DATASTREAM_BY_VALUE, context);
        cje.addArgument(ARGUMENT_NAME_PID, pid);
        cje.addArgument(ARGUMENT_NAME_DS_ID, datastreamID);
        cje.addArgument(ARGUMENT_NAME_ALT_IDS, altIDs);
        cje.addArgument(ARGUMENT_NAME_DS_LABEL, dsLabel);
        cje.addArgument(ARGUMENT_NAME_MIME_TYPE, mimeType);
        cje.addArgument(ARGUMENT_NAME_FORMAT_URI, formatURI);
        cje.addArgument(ARGUMENT_NAME_DS_CONTENT, dsContent);
        cje.addArgument(ARGUMENT_NAME_CHECKSUM_TYPE, checksumType);
        cje.addArgument(ARGUMENT_NAME_CHECKSUM, checksum);
        cje.addArgument(ARGUMENT_NAME_LOG_MESSAGE, logMessage);
        cje.addArgument(ARGUMENT_NAME_LAST_MODIFIED_DATE, lastModifiedDate);
        return (Date) cje.invokeAndClose(delegate, writer);
    } catch (JournalException e) {
        throw new GeneralException("Problem creating the Journal", e);
    }
}

/**
 * Create a journal entry, add the arguments, and invoke the method.
*/
public Date modifyDatastreamByReference(Context context,
                                        String pid,
                                        String datastreamID,
                                        String[] altIDs,
                                        String dsLabel,
                                        String mimeType,
                                        String formatURI,
                                        String dsLocation,
                                        String checksumType,
                                        String checksum,
                                        String logMessage,
                                        Date lastModifiedDate) throws ServerException {
    try {
        CreatorJournalEntry cje =
                new CreatorJournalEntry(METHOD_MODIFY_DATASTREAM_BY_REFERENCE, context);
        cje.addArgument(ARGUMENT_NAME_PID, pid);
        cje.addArgument(ARGUMENT_NAME_DS_ID, datastreamID);
        cje.addArgument(ARGUMENT_NAME_ALT_IDS, altIDs);
        cje.addArgument(ARGUMENT_NAME_DS_LABEL, dsLabel);
        cje.addArgument(ARGUMENT_NAME_MIME_TYPE, mimeType);
        cje.addArgument(ARGUMENT_NAME_FORMAT_URI, formatURI);
        cje.addArgument(ARGUMENT_NAME_DS_LOCATION, dsLocation);
        cje.addArgument(ARGUMENT_NAME_CHECKSUM_TYPE, checksumType);
        cje.addArgument(ARGUMENT_NAME_CHECKSUM, checksum);
        cje.addArgument(ARGUMENT_NAME_LOG_MESSAGE, logMessage);
        cje.addArgument(ARGUMENT_NAME_LAST_MODIFIED_DATE, lastModifiedDate);
        return (Date) cje.invokeAndClose(delegate, writer);
    } catch (JournalException e) {
        throw new GeneralException("Problem creating the Journal", e);
    }
}

/**
 * Create a journal entry, add the arguments, and invoke the method.
 */
public Date setDatastreamState(Context context,
                               String pid,
                               String dsID,
                               String dsState,
                               String logMessage) throws ServerException {
    try {
        CreatorJournalEntry cje =
                new CreatorJournalEntry(METHOD_SET_DATASTREAM_STATE, context);
        cje.addArgument(ARGUMENT_NAME_PID, pid);
        cje.addArgument(ARGUMENT_NAME_DS_ID, dsID);
        cje.addArgument(ARGUMENT_NAME_DS_STATE, dsState);
        cje.addArgument(ARGUMENT_NAME_LOG_MESSAGE, logMessage);
        return (Date) cje.invokeAndClose(delegate, writer);
    } catch (JournalException e) {
        throw new GeneralException("Problem creating the Journal", e);
    }
}

/**
 * Create a journal entry, add the arguments, and invoke the method.
*/
public Date setDatastreamVersionable(Context context,
                                     String pid,
                                     String dsID,
                                     boolean versionable,
                                     String logMessage) throws ServerException {
    try {
        CreatorJournalEntry cje =
                new CreatorJournalEntry(METHOD_SET_DATASTREAM_VERSIONABLE, context);
        cje.addArgument(ARGUMENT_NAME_PID, pid);
        cje.addArgument(ARGUMENT_NAME_DS_ID, dsID);
        cje.addArgument(ARGUMENT_NAME_VERSIONABLE, versionable);
        cje.addArgument(ARGUMENT_NAME_LOG_MESSAGE, logMessage);
        return (Date) cje.invokeAndClose(delegate, writer);
    } catch (JournalException e) {
        throw new GeneralException("Problem creating the Journal", e);
    }
}

/**
 * Create a journal entry, add the arguments, and invoke the method.
 */
public Date[] purgeDatastream(Context context,
                              String pid,
                              String datastreamID,
                              Date startDT,
                              Date endDT,
                              String logMessage) throws ServerException {
    try {
        CreatorJournalEntry cje =
                new CreatorJournalEntry(METHOD_PURGE_DATASTREAM, context);
        cje.addArgument(ARGUMENT_NAME_PID, pid);
        cje.addArgument(ARGUMENT_NAME_DS_ID, datastreamID);
        cje.addArgument(ARGUMENT_NAME_START_DATE, startDT);
        cje.addArgument(ARGUMENT_NAME_END_DATE, endDT);
        cje.addArgument(ARGUMENT_NAME_LOG_MESSAGE, logMessage);
        return (Date[]) cje.invokeAndClose(delegate, writer);
    } catch (JournalException e) {
        throw new GeneralException("Problem creating the Journal", e);
    }
}

/**
 * Create a journal entry, add the arguments, and invoke the method.
 */
public String putTempStream(Context context, InputStream in)
        throws ServerException {
    try {
        CreatorJournalEntry cje =
                new CreatorJournalEntry(METHOD_PUT_TEMP_STREAM, context);
        cje.addArgument(ARGUMENT_NAME_IN, in);
        return (String) cje.invokeAndClose(delegate, writer);
    } catch (JournalException e) {
        throw new GeneralException("Problem creating the Journal", e);
    }
}

/**
 * Create a journal entry, add the arguments, and invoke the method.
*/
public String[] getNextPID(Context context, int numPIDs, String namespace)
        throws ServerException {
    try {
        CreatorJournalEntry cje =
                new CreatorJournalEntry(METHOD_GET_NEXT_PID, context);
        cje.addArgument(ARGUMENT_NAME_NUM_PIDS, numPIDs);
        cje.addArgument(ARGUMENT_NAME_NAMESPACE, namespace);
        return (String[]) cje.invokeAndClose(delegate, writer);
    } catch (JournalException e) {
        throw new GeneralException("Problem creating the Journal", e);
    }
}

/**
 * Create a journal entry, add the arguments, and invoke the method.
 */
public boolean addRelationship(Context context,
                               String pid,
                               String relationship,
                               String objURI,
                               boolean isLiteral,
                               String datatype) throws ServerException {
    try {
        CreatorJournalEntry cje =
                new CreatorJournalEntry(METHOD_ADD_RELATIONSHIP, context);
        cje.addArgument(ARGUMENT_NAME_PID, pid);
        cje.addArgument(ARGUMENT_NAME_RELATIONSHIP, relationship);
        cje.addArgument(ARGUMENT_NAME_OBJECT, objURI);
        cje.addArgument(ARGUMENT_NAME_IS_LITERAL, isLiteral);
        cje.addArgument(ARGUMENT_NAME_DATATYPE, datatype);
        return (Boolean) cje.invokeAndClose(delegate, writer);
    } catch (JournalException e) {
        throw new GeneralException("Problem creating the Journal", e);
    }
}

/**
 * Create a journal entry, add the arguments, and invoke the method.
 */
public boolean purgeRelationship(Context context,
                                 String pid,
                                 String relationship,
                                 String object,
                                 boolean isLiteral,
                                 String datatype) throws ServerException {
    try {
        CreatorJournalEntry cje =
                new CreatorJournalEntry(METHOD_PURGE_RELATIONSHIP, context);
        cje.addArgument(ARGUMENT_NAME_PID, pid);
        cje.addArgument(ARGUMENT_NAME_RELATIONSHIP, relationship);
        cje.addArgument(ARGUMENT_NAME_OBJECT, object);
        cje.addArgument(ARGUMENT_NAME_IS_LITERAL, isLiteral);
        cje.addArgument(ARGUMENT_NAME_DATATYPE, datatype);
        return (Boolean) cje.invokeAndClose(delegate, writer);
    } catch (JournalException e) {
        throw new GeneralException("Problem creating the Journal", e);
    }
}

//
// -------------------------------------------------------------------------
//
// For read-only methods, don't bother with a Journal entry.
// // ------------------------------------------------------------------------- // /** * Let the delegate do it. */ public String compareDatastreamChecksum(Context context, String pid, String dsID, Date versionDate) throws ServerException { return delegate.compareDatastreamChecksum(context, pid, dsID, versionDate); } /** * Let the delegate do it. */ public RelationshipTuple[] getRelationships(Context context, String pid, String relationship) throws ServerException { return delegate.getRelationships(context, pid, relationship); } /** * Let the delegate do it. */ public InputStream getObjectXML(Context context, String pid, String encoding) throws ServerException { return delegate.getObjectXML(context, pid, encoding); } /** * Let the delegate do it. */ public InputStream export(Context context, String pid, String format, String exportContext, String encoding) throws ServerException { return delegate.export(context, pid, format, exportContext, encoding); } /** * Let the delegate do it. */ public Datastream getDatastream(Context context, String pid, String datastreamID, Date asOfDateTime) throws ServerException { return delegate.getDatastream(context, pid, datastreamID, asOfDateTime); } /** * Let the delegate do it. */ public Datastream[] getDatastreams(Context context, String pid, Date asOfDateTime, String dsState) throws ServerException { return delegate.getDatastreams(context, pid, asOfDateTime, dsState); } /** * Let the delegate do it. */ public Datastream[] getDatastreamHistory(Context context, String pid, String datastreamID) throws ServerException { return delegate.getDatastreamHistory(context, pid, datastreamID); } /** * Let the delegate do it. */ public InputStream getTempStream(String id) throws ServerException { return delegate.getTempStream(id); } /** * Let the delegate do it. 
*/ public Validation validate(@PName("context") Context context, @PName("pid") String pid, @PName("asOfDateTime") Date asOfDateTime) throws ServerException { return delegate.validate(context, pid, asOfDateTime); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache.partitioned; import java.io.ByteArrayInputStream; import java.io.DataInput; import java.io.DataInputStream; import java.io.DataOutput; import java.io.IOException; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import org.apache.logging.log4j.Logger; import org.apache.geode.CancelException; import org.apache.geode.DataSerializer; import org.apache.geode.cache.CacheException; import org.apache.geode.distributed.internal.ClusterDistributionManager; import org.apache.geode.distributed.internal.DistributionManager; import org.apache.geode.distributed.internal.DistributionStats; import org.apache.geode.distributed.internal.InternalDistributedSystem; import org.apache.geode.distributed.internal.ReplyException; import org.apache.geode.distributed.internal.ReplyMessage; import org.apache.geode.distributed.internal.ReplyProcessor21; import org.apache.geode.distributed.internal.membership.InternalDistributedMember; import org.apache.geode.internal.Assert; import org.apache.geode.internal.HeapDataOutputStream; import org.apache.geode.internal.Version; import org.apache.geode.internal.cache.ForceReattemptException; 
import org.apache.geode.internal.cache.InitialImageOperation; import org.apache.geode.internal.cache.PartitionedRegion; import org.apache.geode.internal.cache.PartitionedRegionDataStore; import org.apache.geode.internal.cache.TXManagerImpl; import org.apache.geode.internal.cache.TXStateProxy; import org.apache.geode.internal.cache.tier.InterestType; import org.apache.geode.internal.i18n.LocalizedStrings; import org.apache.geode.internal.logging.LogService; import org.apache.geode.internal.logging.log4j.LocalizedMessage; import org.apache.geode.internal.logging.log4j.LogMarker; import org.apache.geode.internal.util.ObjectIntProcedure; public class FetchKeysMessage extends PartitionMessage { private static final Logger logger = LogService.getLogger(); private Integer bucketId; /** * the interest policy to use in processing the keys */ private int interestType; /** * the argument for the interest type (regex string, className, list of keys) */ private Object interestArg; private boolean allowTombstones; private FetchKeysMessage(InternalDistributedMember recipient, int regionId, ReplyProcessor21 processor, Integer bucketId, int itype, Object interestArg, boolean allowTombstones) { super(recipient, regionId, processor); this.bucketId = bucketId; this.interestType = itype; this.interestArg = interestArg; this.allowTombstones = allowTombstones; } /** * Empty constructor to satisfy {@link DataSerializer} requirements */ public FetchKeysMessage() {} /** * Sends a PartitionedRegion message to fetch keys for a bucketId * * @param recipient the member that the fetch keys message is sent to * @param r the PartitionedRegion that contains the bucket * @param bucketId the identity of the bucket that contains the keys to be returned * @param allowTombstones whether to include destroyed entries in the result * @return the processor used to read the returned keys * @throws ForceReattemptException if the peer is no longer available */ public static FetchKeysResponse 
send(InternalDistributedMember recipient, PartitionedRegion r, Integer bucketId, boolean allowTombstones) throws ForceReattemptException { Assert.assertTrue(recipient != null, "FetchKeysMessage NULL recipient"); TXManagerImpl txManager = r.getCache().getTxManager(); boolean resetTxState = isTransactionInternalSuspendNeeded(txManager); TXStateProxy txStateProxy = null; if (resetTxState) { txStateProxy = txManager.pauseTransaction(); } try { FetchKeysMessage tmp = new FetchKeysMessage(); FetchKeysResponse p = (FetchKeysResponse) tmp.createReplyProcessor(r, Collections.singleton(recipient)); FetchKeysMessage m = new FetchKeysMessage(recipient, r.getPRId(), p, bucketId, InterestType.REGULAR_EXPRESSION, ".*", allowTombstones); m.setTransactionDistributed(txManager.isDistributed()); Set failures = r.getDistributionManager().putOutgoing(m); if (failures != null && failures.size() > 0) { throw new ForceReattemptException( LocalizedStrings.FetchKeysMessage_FAILED_SENDING_0.toLocalizedString(m)); } return p; } finally { if (resetTxState) { txManager.unpauseTransaction(txStateProxy); } } } private static boolean isTransactionInternalSuspendNeeded(TXManagerImpl txManager) { TXStateProxy txState = txManager.getTXState(); // handle distributed transaction when needed. 
return txState != null && txState.isRealDealLocal() && !txState.isDistTx(); } /** * @return the FetchKeysResponse * @throws ForceReattemptException if the peer is no longer available */ public static FetchKeysResponse sendInterestQuery(InternalDistributedMember recipient, PartitionedRegion r, Integer bucketId, int itype, Object arg, boolean allowTombstones) throws ForceReattemptException { Assert.assertTrue(recipient != null, "FetchKeysMessage NULL recipient"); FetchKeysMessage tmp = new FetchKeysMessage(); FetchKeysResponse p = (FetchKeysResponse) tmp.createReplyProcessor(r, Collections.singleton(recipient)); FetchKeysMessage m = new FetchKeysMessage(recipient, r.getPRId(), p, bucketId, itype, arg, allowTombstones); m.setTransactionDistributed(r.getCache().getTxManager().isDistributed()); Set failures = r.getDistributionManager().putOutgoing(m); if (failures != null && failures.size() > 0) { throw new ForceReattemptException( LocalizedStrings.FetchKeysMessage_FAILED_SENDING_0.toLocalizedString(m)); } return p; } // override processor type @Override PartitionResponse createReplyProcessor(PartitionedRegion r, Set recipients) { return new FetchKeysResponse(r.getSystem(), r, recipients); } @Override protected boolean operateOnPartitionedRegion(ClusterDistributionManager dm, PartitionedRegion r, long startTime) throws CacheException, ForceReattemptException { if (logger.isDebugEnabled()) { logger.debug("FetchKeysMessage operateOnRegion: {} bucketId: {} type: {} {}", r.getFullPath(), this.bucketId, InterestType.getString(interestType), (allowTombstones ? 
" with tombstones" : " without tombstones")); } PartitionedRegionDataStore ds = r.getDataStore(); if (ds != null) { try { Set keys = ds.handleRemoteGetKeys(this.bucketId, interestType, interestArg, allowTombstones); if (logger.isTraceEnabled(LogMarker.DM_VERBOSE)) { logger.trace(LogMarker.DM_VERBOSE, "FetchKeysMessage sending {} keys back using processorId: : {}", keys.size(), getProcessorId(), keys); } r.getPrStats().endPartitionMessagesProcessing(startTime); FetchKeysReplyMessage.send(getSender(), getProcessorId(), dm, keys); } catch (PRLocallyDestroyedException pde) { if (logger.isDebugEnabled()) { logger.debug("FetchKeysMessage Encountered PRLocallyDestroyedException"); } throw new ForceReattemptException( LocalizedStrings.FetchKeysMessage_ENCOUNTERED_PRLOCALLYDESTROYEDEXCEPTION .toLocalizedString(), pde); } } else { logger.warn(LocalizedMessage.create( LocalizedStrings.FetchKeysMessage_FETCHKEYSMESSAGE_DATA_STORE_NOT_CONFIGURED_FOR_THIS_MEMBER)); } // Unless there was an exception thrown, this message handles sending the response return false; } @Override protected void appendFields(StringBuilder buff) { super.appendFields(buff); buff.append("; bucketId=").append(this.bucketId); } public int getDSFID() { return PR_FETCH_KEYS_MESSAGE; } /** * Versions in which on-wire form has changed, requiring new toData/fromData methods */ public Version[] serializationVersions = null; public Version[] getSerializationVersions() { return serializationVersions; } @Override public void fromData(DataInput in) throws IOException, ClassNotFoundException { super.fromData(in); this.bucketId = Integer.valueOf(in.readInt()); this.interestType = in.readInt(); this.interestArg = DataSerializer.readObject(in); this.allowTombstones = in.readBoolean(); } @Override public void toData(DataOutput out) throws IOException { super.toData(out); out.writeInt(this.bucketId.intValue()); out.writeInt(interestType); DataSerializer.writeObject(interestArg, out); out.writeBoolean(this.allowTombstones); 
} public static class FetchKeysReplyMessage extends ReplyMessage { /** * The number of the series */ int seriesNum; /** * The message number in the series */ int msgNum; /** * The total number of series */ int numSeries; /** * Whether this is the last of a series */ boolean lastInSeries; /** * the stream holding the chunk to send */ transient HeapDataOutputStream chunkStream; /** * the array holding data received */ transient byte[] chunk; /** * Empty constructor to conform to DataSerializable interface */ public FetchKeysReplyMessage() {} private FetchKeysReplyMessage(InternalDistributedMember recipient, int processorId, HeapDataOutputStream chunk, int seriesNum, int msgNum, int numSeries, boolean lastInSeries) { super(); setRecipient(recipient); setProcessorId(processorId); this.seriesNum = seriesNum; this.msgNum = msgNum; this.numSeries = numSeries; this.lastInSeries = lastInSeries; this.chunkStream = chunk; } /** * Send an ack * * @throws ForceReattemptException if the peer is no longer available */ public static void send(final InternalDistributedMember recipient, final int processorId, final DistributionManager dm, Set keys) throws ForceReattemptException { Assert.assertTrue(recipient != null, "FetchKeysReplyMessage NULL reply message"); final int numSeries = 1; final int seriesNum = 0; // chunkEntries returns false if didn't finish if (logger.isDebugEnabled()) { logger.debug("Starting pr keys chunking for {} kets to member {}", keys.size(), recipient); } try { boolean finished = chunkSet(recipient, keys, InitialImageOperation.CHUNK_SIZE_IN_BYTES, false, new ObjectIntProcedure() { int msgNum = 0; boolean last = false; /** * @param a byte[] chunk * @param b positive if last chunk * @return true to continue to next chunk */ public boolean executeWith(Object a, int b) { // if (this.last) // throw new // InternalGemFireError(LocalizedStrings.FetchKeysMessage_ALREADY_PROCESSED_LAST_CHUNK.toLocalizedString()); HeapDataOutputStream chunk = (HeapDataOutputStream) a; 
this.last = b > 0; try { boolean okay = sendChunk(recipient, processorId, dm, chunk, seriesNum, msgNum++, numSeries, this.last); return okay; } catch (CancelException e) { return false; } } }); if (logger.isDebugEnabled()) { logger.debug("{} pr keys chunking", (finished ? "Finished" : "DID NOT complete")); } } catch (IOException io) { throw new ForceReattemptException( LocalizedStrings.FetchKeysMessage_UNABLE_TO_SEND_RESPONSE_TO_FETCH_KEYS_REQUEST .toLocalizedString(), io); } // TODO [bruce] pass a reference to the cache or region down here so we can do this test // Assert.assertTrue(!cache is closed, "chunking interrupted but cache is still open"); } static boolean sendChunk(InternalDistributedMember recipient, int processorId, DistributionManager dm, HeapDataOutputStream chunk, int seriesNum, int msgNum, int numSeries, boolean lastInSeries) { FetchKeysReplyMessage reply = new FetchKeysReplyMessage(recipient, processorId, chunk, seriesNum, msgNum, numSeries, lastInSeries); Set failures = dm.putOutgoing(reply); return (failures == null) || (failures.size() == 0); } /** * Serialize the given set's elments into byte[] chunks, calling proc for each one. proc args: * the byte[] chunk and an int indicating whether it is the last chunk (positive means last * chunk, zero othewise). The return value of proc indicates whether to continue to the next * chunk (true) or abort (false). 
* * @return true if finished all chunks, false if stopped early */ static boolean chunkSet(InternalDistributedMember recipient, Set set, int CHUNK_SIZE_IN_BYTES, boolean includeValues, ObjectIntProcedure proc) throws IOException { Iterator it = set.iterator(); boolean keepGoing = true; boolean sentLastChunk = false; // always write at least one chunk final HeapDataOutputStream mos = new HeapDataOutputStream( InitialImageOperation.CHUNK_SIZE_IN_BYTES + 2048, recipient.getVersionObject()); do { mos.reset(); int avgItemSize = 0; int itemCount = 0; while ((mos.size() + avgItemSize) < InitialImageOperation.CHUNK_SIZE_IN_BYTES && it.hasNext()) { Object key = it.next(); DataSerializer.writeObject(key, mos); // Note we track the itemCount so we can compute avgItemSize itemCount++; // Note we track avgItemSize to help us not to always go one item // past the max chunk size. When we go past it causes us to grow // the ByteBuffer that the chunk is stored in resulting in a copy // of the data. avgItemSize = mos.size() / itemCount; } // Write "end of chunk" entry to indicate end of chunk DataSerializer.writeObject((Object) null, mos); // send 1 for last message if no more data int lastMsg = it.hasNext() ? 0 : 1; keepGoing = proc.executeWith(mos, lastMsg); sentLastChunk = lastMsg == 1 && keepGoing; // if this region is destroyed while we are sending data, then abort. } while (keepGoing && it.hasNext()); // return false if we were told to abort return sentLastChunk; } /** * Processes this message. This method is invoked by the receiver of the message. * * @param dm the distribution manager that is processing the message. 
*/ @Override public void process(final DistributionManager dm, final ReplyProcessor21 p) { final long startTime = getTimestamp(); FetchKeysResponse processor = (FetchKeysResponse) p; if (processor == null) { if (logger.isTraceEnabled(LogMarker.DM_VERBOSE)) { logger.trace(LogMarker.DM_VERBOSE, "FetchKeysReplyMessage processor not found"); } return; } processor.processChunk(this); if (logger.isTraceEnabled(LogMarker.DM_VERBOSE)) { logger.trace(LogMarker.DM_VERBOSE, "{} processed {}", processor, this); } dm.getStats().incReplyMessageTime(DistributionStats.getStatTime() - startTime); } @Override public void toData(DataOutput out) throws IOException { super.toData(out); out.writeInt(this.seriesNum); out.writeInt(this.msgNum); out.writeInt(this.numSeries); out.writeBoolean(this.lastInSeries); DataSerializer.writeObjectAsByteArray(this.chunkStream, out); } @Override public int getDSFID() { return PR_FETCH_KEYS_REPLY_MESSAGE; } @Override public void fromData(DataInput in) throws IOException, ClassNotFoundException { super.fromData(in); this.seriesNum = in.readInt(); this.msgNum = in.readInt(); this.numSeries = in.readInt(); this.lastInSeries = in.readBoolean(); this.chunk = DataSerializer.readByteArray(in); } @Override public String toString() { StringBuffer sb = new StringBuffer(); sb.append("FetchKeysReplyMessage ").append("processorid=").append(this.processorId); if (getSender() != null) { sb.append(",sender=").append(this.getSender()); } sb.append(",seriesNum=").append(seriesNum).append(",msgNum=").append(msgNum) .append(",numSeries=").append(numSeries).append(",lastInSeries=").append(lastInSeries); if (chunkStream != null) { sb.append(",size=").append(chunkStream.size()); } else if (chunk != null) { sb.append(",size=").append(chunk.length); } if (getException() != null) { sb.append(", exception=").append(getException()); } return sb.toString(); } } /** * A processor to capture the value returned by * {@link 
org.apache.geode.internal.cache.partitioned.GetMessage.GetReplyMessage} * * @since GemFire 5.0 */ public static class FetchKeysResponse extends PartitionResponse { private final PartitionedRegion pr; private final Set returnValue; /** * lock used to synchronize chunk processing */ private final Object endLock = new Object(); /** * number of chunks processed */ private volatile int chunksProcessed; /** * chunks expected (set when last chunk has been processed */ private volatile int chunksExpected; /** * whether the last chunk has been processed */ private volatile boolean lastChunkReceived; public FetchKeysResponse(InternalDistributedSystem ds, PartitionedRegion pr, Set recipients) { super(ds, recipients); this.pr = pr; returnValue = new HashSet(); } void processChunk(FetchKeysReplyMessage msg) { // this processing algorighm won't work well if there are multiple recipients. currently the // retry logic for failed recipients is in PartitionedRegion. If we parallelize the sending // of this message, we'll need to handle failover in this processor class and track results // differently. 
boolean doneProcessing = false; if (msg.getException() != null) { process(msg); } else { try { ByteArrayInputStream byteStream = new ByteArrayInputStream(msg.chunk); DataInputStream in = new DataInputStream(byteStream); while (in.available() > 0) { Object key = DataSerializer.readObject(in); if (key != null) { synchronized (returnValue) { returnValue.add(key); } } else { // null should signal the end of the set of keys Assert.assertTrue(in.available() == 0); } } synchronized (this.endLock) { chunksProcessed = chunksProcessed + 1; if (((msg.seriesNum + 1) == msg.numSeries) && msg.lastInSeries) { lastChunkReceived = true; chunksExpected = msg.msgNum + 1; } if (lastChunkReceived && (chunksExpected == chunksProcessed)) { doneProcessing = true; } if (logger.isTraceEnabled(LogMarker.DM_VERBOSE)) { logger.trace(LogMarker.DM_VERBOSE, "{} chunksProcessed={},lastChunkReceived={},chunksExpected={},done={}", this, chunksProcessed, lastChunkReceived, chunksExpected, doneProcessing); } } } catch (Exception e) { processException(new ReplyException( LocalizedStrings.FetchKeysMessage_ERROR_DESERIALIZING_KEYS.toLocalizedString(), e)); checkIfDone(); // fix for hang in 41202 } // if all chunks have been received, wake up the waiting thread if (doneProcessing) { process(msg); } } } /** * @return Set the keys associated with the bucketid of the {@link FetchKeysMessage} * @throws ForceReattemptException if the peer is no longer available */ public Set waitForKeys() throws ForceReattemptException { try { waitForRepliesUninterruptibly(); } catch (ReplyException e) { Throwable t = e.getCause(); if (t instanceof CancelException) { logger.debug("FetchKeysResponse got remote CacheClosedException; forcing reattempt. 
{}", t.getMessage(), t); throw new ForceReattemptException( LocalizedStrings.FetchKeysMessage_FETCHKEYSRESPONSE_GOT_REMOTE_CACHECLOSEDEXCEPTION_FORCING_REATTEMPT .toLocalizedString(), t); } if (t instanceof ForceReattemptException) { logger.debug("FetchKeysResponse got remote ForceReattemptException; rethrowing. {}", e.getMessage(), e); throw new ForceReattemptException( LocalizedStrings.FetchKeysMessage_PEER_REQUESTS_REATTEMPT.toLocalizedString(), t); } e.handleCause(); } if (!this.lastChunkReceived) { throw new ForceReattemptException( LocalizedStrings.FetchKeysMessage_NO_REPLIES_RECEIVED.toLocalizedString()); } return this.returnValue; } } }
/** * Simple financial systemic risk simulator for Java * http://code.google.com/p/systemic-risk/ * * Copyright (c) 2011, 2012 * Gilbert Peffer, CIMNE * gilbert.peffer@gmail.com * All rights reserved * * This software is open-source under the BSD license; see * http://code.google.com/p/systemic-risk/wiki/SoftwareLicense */ package info.financialecology.finance.abm.simulation; import info.financialecology.finance.utilities.Assertion; import info.financialecology.finance.utilities.datastruct.SimulationParameters; import java.io.FileNotFoundException; import java.lang.reflect.Type; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import cern.colt.list.DoubleArrayList; import cern.colt.list.IntArrayList; /** * A simple parameter class that uses xstream to read and write object parameters from and to files. * See also: * Xstream: http://tinyurl.com/66o9od * "Use XStream to serialize Java objects into XML": http://tinyurl.com/6ah27g * * Parser for parameters to read sequences of numbers and sequences of intervals * * To create the parameter file from this class, use writeParameterDefinition * * Adding a new parameter * ---------------------- * 1. Declare the parameter variable in PARAMETER DECLARATIONS * 1a. Intervals and sequences are declared as String and assigned "NAN" as the default value * 2. Define an enum for the parameter in Sequence * 3. 
If the parameter is a sequence, add an entry to Sequence.length() * * * * * @author Gilbert Peffer * */ public class LPLSEqnParams extends SimulationParameters { private static final Logger logger = (Logger)LoggerFactory.getLogger(LPLSEqnParams.class.getSimpleName()); /** * * PARAMETER DECLARATIONS * ====================== * * Abbreviations * ------------- * MF: mutual fund * HF: hedge fund * B: bank * FUND: fundamental strategy * TREND: trend following strategy * LS: long-short strategy * * * */ // MARKET PARAMETERS public int nRuns = 0; // number of simulation runs public int nTicks = 0; // number of ticks per simulation run public int nAssets = 0; // number of assets public int nValueInvestors = 0; // number of value investors public int nLongShortInvestors = 0; // number of long-short investors public int nTrendFollowers = 0; // number of trend followers public int nAgents = 0; // optional: total number of agents (proportional - nMutualFunds : nHedgeFunds : nBanks) public boolean constCapFac = true; // whether agents use a constant capital factor // Market - for each ASSET public String liquidity = "NAN"; // asset liquidities: Array{liquidity_i} public String price_0 = "NAN"; // initial asset prices: Array{price_0_i} public String priceNoise = "NAN"; // asset price noise processes: N[mean, vol] public String refValue = "NAN"; // asset reference log-value processes: N[mean, vol] // AGENT PARAMETERS // All agents public String cash = "NAN"; // agent's initial cash: range defined by U[min, max] // Value investor, for each ASSET public String offsetValue = "NAN"; // asset and investor-specific value offsets: ranges defined by Array{U[min_i, max_i]} // Value investor public String entryVALUE = "NAN"; // investor-specific entry threshold: range defined by U[min, max] public String exitVALUE = "NAN"; // investor-specific exit thresholds: range defined by U[min, max] public double aVALUE = 0.0; // scale parameter for capital assignment public Boolean 
shortSellingAllowed_VALUE = false; // short selling constraint (true: short selling allowed; false: no short selling allowed) public Boolean borrowingAllowed_VALUE = false; // borrowing constraint (true: can borrow unlimited amounts of cash; false: cannot borrow) // Long-short investor public String entryLS = "NAN"; // interval vector [min, max] for uniform distributions of trader-specific entry thresholds public String exitLS = "NAN"; // interval vector [min, max] for uniform distributions of trader-specific exit thresholds public String mawinLS = "NAN"; // interval vector [min, max] for uniform distributions of trader-specific moving average window public String returnPeriodLS = "NAN"; // interval vector [min, max] for uniform distributions of trader-specific period over which return is calculated public double aLS = 0.0; // scale parameter for capital assignment // Trend follower public String entryTREND = "NAN"; // interval vector [min, max] for uniform distributions of trader-specific entry thresholds public String exitTREND = "NAN"; // interval vector [min, max] for uniform distributions of trader-specific exit thresholds public String delayTREND = "NAN"; // interval vector [min, max] for uniform distributions of trader-specific delay public double aTREND = 0.0; // scale parameter for capital assignment public Boolean shortSellingAllowed_TREND = false; // short selling constraint (true: short selling allowed; false: no short selling allowed) public Boolean borrowingAllowed_TREND = false; // borrowing constraint (true: can borrow unlimited amounts of cash; false: cannot borrow) public enum Item { NUMBER, INTERVAL, NUMBER_SEQ, INTERVAL_SEQ; } /* * This enum defines all parameters of non-primitive type. Non-primitive * types are declared as Strings in the XML parameter file and then * internally transformed to the corresponding data types. * * Example: liquidity is defined as a sequence of doubles. The length * of that sequence is given by the method length(). 
In this case there * are as many liquidity parameters as there are assets (params.nAssets) */ public enum Sequence { /** * Parameters * ---------- * - label * - primitive type * - item type (as defined in enum Item) * */ LIQUIDITY("liquidity", Double.class, Item.NUMBER_SEQ), PRICE_0("price_0", Double.class, Item.NUMBER_SEQ), PRICE_NOISE("priceNoise", Double.class, Item.INTERVAL_SEQ), REF_VALUE("refValue", Double.class, Item.INTERVAL_SEQ), OFFSET_VALUE("offsetValue", Double.class, Item.INTERVAL_SEQ), CASH("cash", Double.class, Item.INTERVAL), ENTRY_VALUE("entryVALUE", Double.class, Item.INTERVAL), EXIT_VALUE("exitVALUE", Double.class, Item.INTERVAL), ENTRY_LS("entryLS", Double.class, Item.INTERVAL), EXIT_LS("exitLS", Double.class, Item.INTERVAL), MA_WIN_LS("mawinLS", Integer.class, Item.INTERVAL), R_PERIOD_LS("returnPeriodLS", Integer.class, Item.INTERVAL), ENTRY_TREND("entryTREND", Double.class, Item.INTERVAL), EXIT_TREND("exitTREND", Double.class, Item.INTERVAL), DELAY_TREND("delayTREND", Integer.class, Item.INTERVAL); private final String label; private final Type type; private final Item itemType; Sequence(String label, Type type, Item itemType) { this.label = label; this.type = type; this.itemType = itemType; } public Type type() { return type; } public String label() { return label; } public Item itemType() { return itemType; } /** * Some parameter sequences have to have a specific length. This method declares the length * of a sequence. Where there is no constraint on the length of a sequence, return '0'. 
Only * declare parameters that are sequences, not those that are single numbers or intervals * * @param params the parameter object * @return the constraint on the length of the sequence * */ public int length(LPLSEqnParams params) { switch (this) { case LIQUIDITY: return params.nAssets; case PRICE_0: return params.nAssets; case PRICE_NOISE: return params.nAssets; case REF_VALUE: return params.nAssets; case OFFSET_VALUE: return params.nAssets; default: if (this.itemType() == Item.INTERVAL) return 1; Assertion.assertStrict(false, Assertion.Level.ERR, "Cannot find sequence type '" + this.label() + "'"); } return -1; } } LPLSEqnParams() {} /** * Validate the parameters * - constraints on their value * - constraints on sequences of parameters, e.g. their number is equal to the number of assets * * @return true, if the parameter object is valid * */ public Boolean validate() { ArrayList<IntArrayList> iSeq = null; // integer sequences ArrayList<DoubleArrayList> dSeq = null; // sequences of doubles /** * * MARKET PARAMETERS * */ /** * VALIDATE: primitive types (nRuns, nTicks, ...) 
*/
        /*
         * Tail of the parameter-validation method: simulation-level counts first,
         * then per-asset market parameters, then agent parameters.
         *
         * NOTE(review): several assertion messages do not match what is checked —
         * nTrendFollowers reports "nBanks", aVALUE reports "aFUND", the priceNoise
         * loop reports the refValue label, and the nTrendFollowers / nAgents
         * messages say "greater than '0'" although the check is '>= 0'.
         * Flagged here; message strings deliberately left untouched.
         */
        Assertion.assertStrict(nRuns > 0, Assertion.Level.ERR, "nRuns = " + nRuns + ": needs to be '> 0'");
        Assertion.assertStrict(nTicks > 0, Assertion.Level.ERR, "nTicks = " + nTicks + ": needs to be '> 0'");
        Assertion.assertStrict(nAssets > 0, Assertion.Level.ERR, "nAssets = " + nAssets + ": needs to be '> 0'");
        Assertion.assertStrict(nValueInvestors >= 0, Assertion.Level.ERR, "nValueInvestors = " + nValueInvestors + ": needs to be '>= 0'");
        Assertion.assertStrict(nLongShortInvestors >= 0, Assertion.Level.ERR, "nLongShortInvestors = " + nLongShortInvestors + ": needs to be '>= 0'");
        Assertion.assertStrict(nTrendFollowers >= 0, Assertion.Level.ERR, "nBanks = " + nTrendFollowers + ": needs to be greater than '0'");
        Assertion.assertStrict(nAgents >= 0, Assertion.Level.ERR, "nAgents = " + nAgents + ": needs to be greater than '0'");

        /* VALIDATE: liquidity {seq-number} — one strictly positive liquidity per asset */
        dSeq = getDoubleSequence(Sequence.LIQUIDITY);
        validateDoubleSequence(dSeq, Sequence.LIQUIDITY);
        for (Iterator<DoubleArrayList> i = dSeq.iterator(); i.hasNext(); )
            Assertion.assertStrict((i.next().get(0) > 0), Assertion.Level.ERR, "Parameter '" + Sequence.LIQUIDITY.label() + "': has to be '> 0'");

        /* VALIDATE: price_0 {seq-number} — initial prices must be non-negative */
        dSeq = getDoubleSequence(Sequence.PRICE_0);
        validateDoubleSequence(dSeq, Sequence.PRICE_0);
        for (Iterator<DoubleArrayList> i = dSeq.iterator(); i.hasNext(); )
            Assertion.assertStrict((i.next().get(0) >= 0), Assertion.Level.ERR, "Parameter '" + Sequence.PRICE_0.label() + "': has to be '>= 0'");

        /* VALIDATE: priceNoise {seq-interval N[mu, sigma]} (one for each asset) — sigma >= 0 */
        dSeq = getDoubleSequence(Sequence.PRICE_NOISE);
        validateDoubleSequence(dSeq, Sequence.PRICE_NOISE);
        for (Iterator<DoubleArrayList> i = dSeq.iterator(); i.hasNext(); ) {
            DoubleArrayList dal = i.next();
            // NOTE(review): message uses the REF_VALUE label — presumably copy-paste; verify.
            Assertion.assertStrict(dal.get(1) >= 0, Assertion.Level.ERR, "Parameter '" + Sequence.REF_VALUE.label() + "'(sigma): has to be '>= 0'");
        }

        /* VALIDATE: refValue {seq-interval N[mu, sigma]} (one for each asset) — sigma >= 0 */
        dSeq = getDoubleSequence(Sequence.REF_VALUE);
        validateDoubleSequence(dSeq, Sequence.REF_VALUE);
        for (Iterator<DoubleArrayList> i = dSeq.iterator(); i.hasNext(); ) {
            DoubleArrayList dal = i.next();
            Assertion.assertStrict(dal.get(1) >= 0, Assertion.Level.ERR, "Parameter '" + Sequence.REF_VALUE.label() + "'(sigma): has to be '>= 0'");
        }

        /*
         * AGENT PARAMETERS
         */

        /* VALIDATE: cash {seq-number} */
        dSeq = getDoubleSequence(Sequence.CASH);
        validateDoubleSequence(dSeq, Sequence.CASH);

        /* VALIDATE: offsetValue {seq-interval U[min, max]} (one for each asset) */
        dSeq = getDoubleSequence(Sequence.OFFSET_VALUE);
        validateDoubleSequence(dSeq, Sequence.OFFSET_VALUE);
        validateMinMaxInterval(dSeq, Sequence.OFFSET_VALUE);

        /* VALIDATE: entryVALUE {interval U[min, max]} — both bounds non-negative */
        dSeq = getDoubleSequence(Sequence.ENTRY_VALUE);
        validateDoubleSequence(dSeq, Sequence.ENTRY_VALUE);
        validateMinMaxInterval(dSeq, Sequence.ENTRY_VALUE);
        for (Iterator<DoubleArrayList> i = dSeq.iterator(); i.hasNext(); ) {
            DoubleArrayList dal = i.next();
            Assertion.assertStrict((dal.get(0) >= 0) && (dal.get(1) >= 0), Assertion.Level.ERR, "Parameter '" + Sequence.ENTRY_VALUE.label() + "': has to be '>= 0'");
        }

        /* VALIDATE: exitVALUE {interval U[min, max]} — currently only structural checks above */
        dSeq = getDoubleSequence(Sequence.EXIT_VALUE);
        validateDoubleSequence(dSeq, Sequence.EXIT_VALUE);
        validateMinMaxInterval(dSeq, Sequence.EXIT_VALUE);
        for (Iterator<DoubleArrayList> i = dSeq.iterator(); i.hasNext(); ) {
            DoubleArrayList dal = i.next();
            // TODO add a check |tau|<|T|
            // Assertion.assertStrict((dal.get(0) >= 0) && (dal.get(1) >= 0), Assertion.Level.ERR, "Parameter '" +
            // Sequence.TAU_FUND_MF.label() + "': has to be '>= 0'");
        }

        /* VALIDATE: aVALUE {number} — NOTE(review): message says 'aFUND'; confirm intended name */
        Assertion.assertStrict(aVALUE >= 0, Assertion.Level.ERR, "Parameter 'aFUND' has to be '>= 0'");

        /* VALIDATE: entryTREND {interval U[min, max]} — both bounds non-negative */
        dSeq = getDoubleSequence(Sequence.ENTRY_TREND);
        validateDoubleSequence(dSeq, Sequence.ENTRY_TREND);
        validateMinMaxInterval(dSeq, Sequence.ENTRY_TREND);
        for (Iterator<DoubleArrayList> i = dSeq.iterator(); i.hasNext(); ) {
            DoubleArrayList dal = i.next();
            Assertion.assertStrict((dal.get(0) >= 0) && (dal.get(1) >= 0), Assertion.Level.ERR, "Parameter '" + Sequence.ENTRY_TREND.label() + "': has to be '>= 0'");
        }

        /* VALIDATE: exitTREND {interval U[min, max]} — both bounds non-negative */
        dSeq = getDoubleSequence(Sequence.EXIT_TREND);
        validateDoubleSequence(dSeq, Sequence.EXIT_TREND);
        validateMinMaxInterval(dSeq, Sequence.EXIT_TREND);
        for (Iterator<DoubleArrayList> i = dSeq.iterator(); i.hasNext(); ) {
            DoubleArrayList dal = i.next();
            Assertion.assertStrict((dal.get(0) >= 0) && (dal.get(1) >= 0), Assertion.Level.ERR, "Parameter '" + Sequence.EXIT_TREND.label() + "': has to be '>= 0'");
        }

        /* VALIDATE: delayTREND {interval U[min, max]} — both bounds non-negative */
        iSeq = getIntegerSequence(Sequence.DELAY_TREND);
        validateIntegerSequence(iSeq, Sequence.DELAY_TREND);
        validateMinMaxInterval(iSeq, Sequence.DELAY_TREND);
        for (Iterator<IntArrayList> i = iSeq.iterator(); i.hasNext(); ) {
            IntArrayList dal = i.next();
            Assertion.assertStrict((dal.get(0) >= 0) && (dal.get(1) >= 0), Assertion.Level.ERR, "Parameter '" + Sequence.DELAY_TREND.label() + "': has to be '>= 0'");
        }

        /* VALIDATE: aTREND {number} */
        Assertion.assertStrict(aTREND >= 0, Assertion.Level.ERR, "Parameter 'aTREND' has to be '>= 0'");

        /* VALIDATE: entryLS {interval U[min, max]} — both bounds non-negative */
        dSeq = getDoubleSequence(Sequence.ENTRY_LS);
        validateDoubleSequence(dSeq, Sequence.ENTRY_LS);
        validateMinMaxInterval(dSeq, Sequence.ENTRY_LS);
        for (Iterator<DoubleArrayList> i = dSeq.iterator(); i.hasNext(); ) {
            DoubleArrayList dal = i.next();
            Assertion.assertStrict((dal.get(0) >= 0) && (dal.get(1) >= 0), Assertion.Level.ERR, "Parameter '" + Sequence.ENTRY_LS.label() + "': has to be '>= 0'");
        }

        /* VALIDATE: exitLS {interval U[min, max]} — both bounds non-negative */
        dSeq = getDoubleSequence(Sequence.EXIT_LS);
        validateDoubleSequence(dSeq, Sequence.EXIT_LS);
        validateMinMaxInterval(dSeq, Sequence.EXIT_LS);
        for (Iterator<DoubleArrayList> i = dSeq.iterator(); i.hasNext(); ) {
            DoubleArrayList dal = i.next();
            Assertion.assertStrict((dal.get(0) >= 0) && (dal.get(1) >= 0), Assertion.Level.ERR, "Parameter '" + Sequence.EXIT_LS.label() + "': has to be '>= 0'");
        }

        /* VALIDATE: mawinLS {interval U[min, max]} — both bounds non-negative */
        iSeq = getIntegerSequence(Sequence.MA_WIN_LS);
        validateIntegerSequence(iSeq, Sequence.MA_WIN_LS);
        validateMinMaxInterval(iSeq, Sequence.MA_WIN_LS);
        for (Iterator<IntArrayList> i = iSeq.iterator(); i.hasNext(); ) {
            IntArrayList dal = i.next();
            Assertion.assertStrict((dal.get(0) >= 0) && (dal.get(1) >= 0), Assertion.Level.ERR, "Parameter '" + Sequence.MA_WIN_LS.label() + "': has to be '>= 0'");
        }

        /* VALIDATE: returnPeriodLS {interval U[min, max]} — both bounds non-negative */
        iSeq = getIntegerSequence(Sequence.R_PERIOD_LS);
        validateIntegerSequence(iSeq, Sequence.R_PERIOD_LS);
        validateMinMaxInterval(iSeq, Sequence.R_PERIOD_LS);
        for (Iterator<IntArrayList> i = iSeq.iterator(); i.hasNext(); ) {
            IntArrayList dal = i.next();
            Assertion.assertStrict((dal.get(0) >= 0) && (dal.get(1) >= 0), Assertion.Level.ERR, "Parameter '" + Sequence.R_PERIOD_LS.label() + "': has to be '>= 0'");
        }

        /* VALIDATE: aLS {number} */
        Assertion.assertStrict(aLS >= 0, Assertion.Level.ERR, "Parameter 'aLS' has to be '>= 0'");

        return true;
    }

    /**
     * Validates an integer parameter sequence: it must be non-empty, its items must
     * have the arity required by the sequence's item type (1 = number, 2 = [min,max]
     * interval), a singleton is expanded to the required length, and the final
     * length must equal {@code sequence.length(this)}.
     *
     * NOTE(review): for Item.INTERVAL the condition accepts {@code iSeq.size() > 1}
     * via '||', which looks inconsistent with the "expected single interval"
     * message — confirm intent.
     */
    private void validateIntegerSequence(ArrayList<IntArrayList> iSeq, Sequence sequence) {
        Assertion.assertStrict(iSeq.size() > 0, Assertion.Level.ERR, "Parameter missing: '" + sequence.label() + "'");
        if (sequence.itemType() == Item.NUMBER_SEQ)
            Assertion.assertStrict((iSeq.get(0).size() == 1), Assertion.Level.ERR, "Parameter '" + sequence.label() + "': expected numbers, not intervals");
        else if (sequence.itemType() == Item.INTERVAL_SEQ)
            Assertion.assertStrict((iSeq.get(0).size() == 2), Assertion.Level.ERR, "Parameter '" + sequence.label() + "': expected intervals, not numbers");
        else if (sequence.itemType() == Item.INTERVAL)
            Assertion.assertStrict(((iSeq.get(0).size() == 2) || (iSeq.size() > 1)), Assertion.Level.ERR, "Parameter '" + sequence.label() + "': expected single interval");
        else
            Assertion.assertStrict(false, Assertion.Level.ERR, "Can't validate for this item type");
        // TODO add label (string) to Item to identify item type
        expandIntegerSequence(iSeq, sequence);
        Assertion.assertStrict((iSeq.size() == sequence.length(this)) && (sequence.length(this) != 0), Assertion.Level.ERR, "Number of '" + sequence.label() + "' " + "parameters (" + iSeq.size() + ") has to be equal to " + sequence.length(this));
    }

    /**
     * Same contract as {@link #validateIntegerSequence} but for double sequences.
     *
     * NOTE(review): the INTERVAL_SEQ branch reuses the "expected numbers, not
     * intervals" message — presumably copy-paste from the NUMBER_SEQ branch; the
     * message string is left untouched here.
     */
    private void validateDoubleSequence(ArrayList<DoubleArrayList> dSeq, Sequence sequence) {
        Assertion.assertStrict(dSeq.size() > 0, Assertion.Level.ERR, "Parameter missing: '" + sequence.label() + "'");
        if (sequence.itemType() == Item.NUMBER_SEQ)
            Assertion.assertStrict((dSeq.get(0).size() == 1), Assertion.Level.ERR, "Parameter '" + sequence.label() + "': expected numbers, not intervals");
        else if (sequence.itemType() == Item.INTERVAL_SEQ)
            Assertion.assertStrict((dSeq.get(0).size() == 2), Assertion.Level.ERR, "Parameter '" + sequence.label() + "': expected numbers, not intervals");
        else if (sequence.itemType() == Item.INTERVAL)
            Assertion.assertStrict(((dSeq.get(0).size() == 2) || (dSeq.size() > 1)), Assertion.Level.ERR, "Parameter '" + sequence.label() + "': expected single interval");
        else
            Assertion.assertStrict(false, Assertion.Level.ERR, "Can't validate for this item type");
        // TODO add label (string) to Item to identify item type
        expandDoubleSequence(dSeq, sequence);
        Assertion.assertStrict((dSeq.size() == sequence.length(this)) && (sequence.length(this) != 0), Assertion.Level.ERR, "Number of '" + sequence.label() + "' " + "parameters (" + dSeq.size() + ") has to be equal to " + sequence.length(this));
    }

    /**
     * Asserts that every item of the sequence is a two-element interval whose lower
     * bound is <= its upper bound. The raw list is cast to the integer or double
     * variant based on {@code sequence.type}; any other type is silently ignored.
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    private void validateMinMaxInterval(ArrayList dSeq, Sequence sequence) {
        ArrayList<IntArrayList> ial;
        ArrayList<DoubleArrayList> dal;
        if (sequence.type == Integer.class) {
            ial = (ArrayList<IntArrayList>) dSeq;
            for (int i = 0; i < ial.size(); i++) {
                Assertion.assertStrict(ial.get(i).size() == 2, Assertion.Level.ERR, "Error in 'validateMinMaxInterval': expected interval");
                Assertion.assertStrict(ial.get(i).get(0) <= ial.get(i).get(1), Assertion.Level.ERR, "Parameter '" + sequence.label() + "': " + "the lower bound of intervals has to be smaller or equal to their upper bound");
            }
        } else if (sequence.type == Double.class) {
            dal = (ArrayList<DoubleArrayList>) dSeq;
            for (int i = 0; i < dal.size(); i++) {
                Assertion.assertStrict(dal.get(i).size() == 2, Assertion.Level.ERR, "Error in 'validateMinMaxInterval': expected interval");
                Assertion.assertStrict(dal.get(i).get(0) <= dal.get(i).get(1), Assertion.Level.ERR, "Parameter '" + sequence.label() + "': " + "the lower bound of intervals has to be smaller or equal to their upper bound");
            }
        }
    }

    /**
     * Though there is some redundancy, these convenience accessors are still useful
     * because of better readability at call sites: each fetches a sequence and
     * validates it in one step.
     */
    public ArrayList<IntArrayList> getIntegerNumberSequence(Sequence sequence) {
        ArrayList<IntArrayList> iSeq = getIntegerSequence(sequence);
        validateIntegerSequence(iSeq, sequence);
        return iSeq;
    }

    /** Alias for {@link #getIntegerIntervalSequence} when a single interval is expected. */
    public ArrayList<IntArrayList> getIntegerInterval(Sequence sequence) {
        return getIntegerIntervalSequence(sequence);
    }

    /** Fetches and validates a sequence of integer [min,max] intervals. */
    public ArrayList<IntArrayList> getIntegerIntervalSequence(Sequence sequence) {
        ArrayList<IntArrayList> iSeq = getIntegerSequence(sequence);
        validateIntegerSequence(iSeq, sequence);
        return iSeq;
    }

    /** Fetches and validates a sequence of single double numbers. */
    public ArrayList<DoubleArrayList> getDoubleNumberSequence(Sequence sequence) {
        ArrayList<DoubleArrayList> dSeq = getDoubleSequence(sequence);
        validateDoubleSequence(dSeq, sequence);
        return dSeq;
    }

    /** Alias for {@link #getDoubleIntervalSequence} when a single interval is expected. */
    public ArrayList<DoubleArrayList> getDoubleInterval(Sequence sequence) {
        return getDoubleIntervalSequence(sequence);
    }

    /** Fetches and validates a sequence of double [min,max] intervals. */
    public ArrayList<DoubleArrayList> getDoubleIntervalSequence(Sequence sequence) {
        ArrayList<DoubleArrayList> dSeq = getDoubleSequence(sequence);
        validateDoubleSequence(dSeq, sequence);
        return dSeq;
    }

    /**
     * Transforms a sequence of strings into integers or intervals of integers.
     * Checks whether the sequence contains integers rather than doubles.
     *
     * Returns a sequence of single numbers or of intervals.
     */
    private ArrayList<IntArrayList> getIntegerSequence(Sequence sequence) {
        ArrayList<IntArrayList> intSeq = new ArrayList<IntArrayList>();
        ArrayList<String> seq = null;
        String s = null;
        // TODO use reflection to access the field, based on sequence.label
        switch (sequence) {
            case DELAY_TREND:
                s = delayTREND;
                break;
            case MA_WIN_LS:
                s = mawinLS;
                break;
            case R_PERIOD_LS:
                s = returnPeriodLS;
                break;
            default:
                Assertion.assertStrict(false, Assertion.Level.ERR, "Unkown sequence type passed into 'getIntegerSequence'");
        }
        seq = parseParamSequence(s);
        if (s.indexOf("[") == -1) { // a sequence of numbers, not intervals
            for (int i = 0; i < seq.size(); i++) {
                IntArrayList value = new IntArrayList(new int [] {0}); // forces the array to be initialised, otherwise 'set' won't work
                Assertion.assertStrict(isInteger(seq.get(i)), Assertion.Level.ERR, "The values in " + s + " = " + seq + " have to be integers");
                value.set(0, Integer.valueOf(seq.get(i)));
                intSeq.add(value);
            }
        } else { // is it a sequence of intervals?
            String [] item;
            for (int i = 0; i < seq.size(); i++) {
                IntArrayList interval = new IntArrayList(new int [] {0,0}); // forces the array to be initialised, otherwise 'set' won't work
                item = seq.get(i).replace("[", "").split("[\\[\\],]");
                Assertion.assertStrict(isInteger(item[0]) && isInteger(item[1]), Assertion.Level.ERR, "The values in " + s + " = " + seq + " have to be integers");
                interval.set(0, Integer.valueOf(item[0]));
                interval.set(1, Integer.valueOf(item[1]));
                intSeq.add(interval);
            }
        }
        if ((intSeq.size() == 1) && (sequence.length(this) > 1)) // one item of the sequence is provided, copy if more are required
            expandIntegerSequence(intSeq, sequence);
        Assertion.assertStrict((intSeq.size() == sequence.length(this)) && (sequence.length(this) != 0), Assertion.Level.ERR, intSeq.size() + " items in the sequence '" + sequence.label + "', different to the " + sequence.length(this) + " required");
        return intSeq;
    }

    /**
     * Transforms a sequence of strings into doubles or intervals of doubles.
     */
    private ArrayList<DoubleArrayList> getDoubleSequence(Sequence sequence) {
        ArrayList<DoubleArrayList> doubleSeq = new ArrayList<DoubleArrayList>();
        ArrayList<String> seq = null;
        String s = null;
        switch (sequence) {
            case LIQUIDITY:
                s = liquidity;
                break;
            case PRICE_0:
                s = price_0;
                break;
            case CASH:
                s = cash;
                break;
            case PRICE_NOISE:
                s = priceNoise;
                break;
            case REF_VALUE:
                s = refValue;
                break;
            case OFFSET_VALUE:
                s = offsetValue;
                break;
            case ENTRY_VALUE:
                s = entryVALUE;
                break;
            case EXIT_VALUE:
                s = exitVALUE;
                break;
            case ENTRY_LS:
                s = entryLS;
                break;
            case EXIT_LS:
                s = exitLS;
                break;
            case ENTRY_TREND:
                s = entryTREND;
                break;
            case EXIT_TREND:
                s = exitTREND;
                break;
            default:
                Assertion.assertStrict(false, Assertion.Level.ERR, "Unkown sequence type '" + sequence.label() + "' " + "passed into 'getDoubleSequence'");
        }
        seq = parseParamSequence(s);
        if (s.indexOf("[") == -1) { // a sequence of numbers, not intervals
            for (int i = 0; i < seq.size(); i++) {
                DoubleArrayList value = new DoubleArrayList(new double [] {0.0}); // forces the array to be initialised, otherwise 'set' won't work
                value.set(0, Double.valueOf(seq.get(i)));
                doubleSeq.add(value);
            }
        } else { // is it a sequence of intervals?
            String [] item;
            for (int i = 0; i < seq.size(); i++) {
                DoubleArrayList interval = new DoubleArrayList(new double [] {0.0,0.0}); // forces the array to be initialised, otherwise 'set' won't work
                item = seq.get(i).replace("[", "").split("[\\[\\],]");
                interval.set(0, Double.valueOf(item[0]));
                interval.set(1, Double.valueOf(item[1]));
                doubleSeq.add(interval);
            }
        }
        if ((doubleSeq.size() == 1) && (sequence.length(this) > 1)) // one item of the sequence is provided, copy if more are required
            expandDoubleSequence(doubleSeq, sequence);
        Assertion.assertStrict((doubleSeq.size() == sequence.length(this)) && (sequence.length(this) != 0), Assertion.Level.ERR, doubleSeq.size() + " items in the sequence '" + sequence.label + "', different to the " + sequence.length(this) + " required");
        return doubleSeq;
    }

    /**
     * Parses the textual mini-language used for parameter sequences into a list of
     * tokens (numbers like "3" or intervals like "[1,2]").
     *
     * @param s the raw expression as written in the parameter file
     * @return the expanded list of number / interval tokens
     */
    private static ArrayList<String> parseParamSequence(String s) {
        ArrayList<String> seq = new ArrayList<String>();
        String [] vec = null;
        String [] vec2 = null;
        String [] vec3 = null;
        s = s.replaceAll("\\s",""); // remove all white spaces
        // logger.trace("{}", s + "-");
        String s_ext = s + "-"; // closing paranethesis is at last position of sting, so for 'split' to work we append an arbitrary character
        // logger.trace("{} {}", s_ext.split("\\(").length, s_ext.split("\\)").length);
        Assertion.assertStrict(s_ext.split("\\(").length == s_ext.split("\\)").length, Assertion.Level.ERR, "The number of open and close parentheses doesn't coincide in '" + s + "'");
        Assertion.assertStrict(s.split("\\(").length <= 2, Assertion.Level.ERR, "The expression '" + s + "' has to contain no or exactly one set of round parentheses");
        int nOpen = s.split("\\[").length - 1;
        int nClose = s.split("\\]").length - 1;
        int nColons = 0;
        if ((s.startsWith("[")) && (s.endsWith("]"))) // a single interval
            nOpen = nClose = 1;
        Assertion.assertStrict(nOpen == nClose, Assertion.Level.ERR, "The number of open and close square brackets doesn't coincide");
        vec = s.split("\\("); // Command
        if (vec.length > 1)
            nColons = vec[1].split(":").length - 1;
        /*
         * Parsing sequences
         *
         * c(1,2,3,4)
         * c([1,2],[3,4],[5,6])
         * c(1:4) = 1,2,3,4
         * c(1:2:4) = 0.5,1.0,1.5,2.0 - '4' is the repeater argument
         * rep(9,3) = 9,9,9
         * rep([1,4],2) = [1,4],[1,4]
         */
        if (vec.length == 1) { // either a single number or interval
            if (nOpen == 0) { // a single number
                Assertion.assertStrict(isNumber(s), Assertion.Level.ERR, "The expression " + s + " is not a numeric");
                seq.add(s);
            } else if (nOpen == 1) { // an interval
                vec = s.split("[\\[\\],]");
                for (int i = 0; i < vec.length; i++)
                    if (isNumber(vec[i]))
                        seq.add(vec[i]);
                Assertion.assertStrict(seq.size() == 2, Assertion.Level.ERR, "The interval " + s + " has to contain two numbers");
                String interval = "[" + seq.get(0) + "," + seq.get(1) + "]";
                seq.clear();
                seq.add(interval);
            }
        } else if (vec[0].equalsIgnoreCase("c")) {
            if (nOpen == 0) { // single numbers, not intervals
                if (nColons == 0) { // no repeaters
                    vec2 = vec[1].split("[\\),]");
                    seq = new ArrayList<String>(Arrays.asList(vec2));
                } else { // single numbers, repeaters
                    double start, end;
                    int points;
                    Assertion.assertStrict(nColons <= 2, Assertion.Level.ERR, "There cannot be more than two columns in the expression for command 'c'");
                    vec2 = vec[1].split("[\\):]");
                    seq = new ArrayList<String>(Arrays.asList(vec2));
                    Boolean isDoubleSequence = false;
                    if (!isInteger(seq.get(0)) || !isInteger(seq.get(1))) { // if the upper or lower bound is a double, then assume the sequence is a double
                        isDoubleSequence = true;
                        Assertion.assertStrict(nColons == 2, Assertion.Level.ERR, "You must specify the number of elements for a sequence of doubles");
                        points = Integer.valueOf(seq.get(2));
                    } else { // upper an lower bound are both integers
                        points = Integer.valueOf(seq.get(1)) - Integer.valueOf(seq.get(0)) + 1;
                        if (nColons == 1) { // if no repeater is provided, the sequence is integer
                            isDoubleSequence = false;
                        } else { // a repeater is provided
                            if (((Math.abs(points) - 1) % (Integer.valueOf(seq.get(2)) - 1)) != 0)
                                isDoubleSequence = true; // if the repeater doesn't split the interval along integer points, the sequence is double
                            points = Integer.valueOf(seq.get(2));
                        }
                    }
                    start = Double.valueOf(seq.get(0)); // for integer sequences, we convert all numbers to integers further below
                    end = Double.valueOf(seq.get(1));
                    if (nColons == 2) {
                        Assertion.assertStrict(Math.abs(points) > 1, Assertion.Level.ERR, "Error in expression " + s + ". Number of points to generate " + "has to be greater than '0'");
                        if ((points > 0) && (start > end))
                            Assertion.assertStrict(false, Assertion.Level.ERR, "Error in expression " + s + ". End has to be greater than start, " + "for an increasing sequence");
                        else if ((points < 0) && (start < end))
                            Assertion.assertStrict(false, Assertion.Level.ERR, "Error in expression " + s + ". Start has to be greater than end, " + "for a decreasing sequence");
                    }
                    seq.clear();
                    double interval = (double) end - (double) start;
                    double value;
                    // Example sequence in interval 1:9 and with three points: |1| 2 3 4 |5| 6 7 8 |9|
                    for (int i = 0; i < Math.abs(points); i++) {
                        value = (double) start + (interval / (double) (Math.abs(points) - 1)) * i;
                        if (isDoubleSequence == true)
                            seq.add(Double.toString(value));
                        else
                            seq.add(Integer.toString((int) value));
                    }
                }
            } else if (nOpen > 0) { // a sequence of intervals
                Assertion.assertStrict(nColons == 0, Assertion.Level.ERR, "A sequence of intervals cannot contain a column ':'");
                vec2 = vec[1].split("[\\[\\]\\)]");
                for (int i = 0; i < vec2.length; i++) {
                    vec3 = vec2[i].split(",");
                    if (vec3.length == 2)
                        seq.add(vec2[i]);
                }
            }
        } else if (vec[0].equalsIgnoreCase("rep")) {
            if (nOpen == 0) {
                Assertion.assertStrict(nColons == 0, Assertion.Level.ERR, "A sequence of intervals cannot contain a column ':'");
                vec2 = vec[1].split("[\\),]");
                seq = new ArrayList<String>();
                Assertion.assertStrict(isInteger(vec2[1]), Assertion.Level.ERR, "The repeater variable in expression " + s + " has to be integers");
                int repeat = Integer.valueOf(vec2[1]);
                for (int i = 0; i < repeat; i++)
                    seq.add(vec2[0]);
            } else if (nOpen > 0) {
                Assertion.assertStrict(nOpen == 1, Assertion.Level.ERR, "Error in expression " + s + ". There can only be one pair of square brackets");
                vec2 = vec[1].split("[\\[\\]\\)]");
                for (int i = 0; i < vec2.length; i++) {
                    vec3 = vec2[i].split("[,]+");
                    if (vec3.length == 2) {
                        if ((vec3[0].length() == 0) && (vec3[1].length() != 0))
                            seq.add(vec3[1]);
                        else if ((vec3[1].length() == 0) && (vec3[0].length() != 0))
                            seq.add(vec3[0]);
                        else if ((vec3[0].length() != 0) && (vec3[1].length() != 0))
                            seq.add(vec2[i]);
                    }
                }
                Assertion.assertStrict(seq.size() == 2, Assertion.Level.ERR, "Error in expression " + s);
                Assertion.assertStrict(isInteger(seq.get(1)), Assertion.Level.ERR, "The repeater variable in expression " + s + " has to be an integer");
                String interval = "[" + seq.get(0) + "]";
                int repeat = Integer.valueOf(seq.get(1));
                seq.clear();
                for (int i = 0; i < repeat; i++)
                    seq.add(interval);
            }
        }
        return seq;
    }

    /**
     * If only one number or interval is provided where a sequence is expected,
     * automatically add the required number of copies.
     *
     * @param dSeq     the sequence to expand in place
     * @param sequence identifies the parameter and its required length
     */
    private void expandDoubleSequence(ArrayList<DoubleArrayList> dSeq, Sequence sequence) {
        int size = sequence.length(this);
        if ((dSeq.size() == 1) && size > 1) { // expand sequence if only one item is provided
            DoubleArrayList dal;
            for (int i = 1; i < size; i++) {
                dal = new DoubleArrayList();
                dal.add(dSeq.get(0).get(0));
                if (dSeq.get(0).size() == 2)
                    dal.add(dSeq.get(0).get(1));
                dSeq.add(dal);
            }
        } else if ((dSeq.size() != size) && (sequence.length(this) != 0))
            Assertion.assertStrict(false, Assertion.Level.ERR, "Trying to expand sequence '" + sequence.label() + "' that " + "contains more than one number or interval");
    }

    /**
     * If only one number or interval is provided where a sequence is expected,
     * automatically add the required number of copies.
     *
     * @param iSeq     the sequence to expand in place
     * @param sequence identifies the parameter and its required length
     */
    private void expandIntegerSequence(ArrayList<IntArrayList> iSeq, Sequence sequence) {
        int size = sequence.length(this);
        if ((iSeq.size() == 1) && size > 1) { // expand sequence if only one item is provided
            IntArrayList dal;
            for (int i = 1; i < size; i++) {
                dal = new IntArrayList();
                dal.add(iSeq.get(0).get(0));
                if (iSeq.get(0).size() == 2)
                    dal.add(iSeq.get(0).get(1));
                iSeq.add(dal);
            }
        } else if ((iSeq.size() != size) && (sequence.length(this) != 0))
            Assertion.assertStrict(false, Assertion.Level.ERR, "Trying to expand sequence '" + sequence.label() + "' that " + "contains more than one number or interval");
    }

    /**
     * Creates an xml file that holds the fields of this object.
     *
     * @param file destination path of the xml definition
     * @throws FileNotFoundException if the file cannot be created
     */
    public static void writeParamDefinition(String file) throws FileNotFoundException {
        writeParamsDefinition(file, new LPLSEqnParams());
    }

    /**
     * Reads values from an xml file and initialises the fields of the newly created
     * parameter object.
     *
     * @param file source path of the xml parameter file
     * @return the populated parameter object
     * @throws FileNotFoundException if the file does not exist
     */
    public static LPLSEqnParams readParameters(String file) throws FileNotFoundException {
        return (LPLSEqnParams) readParams(file, new LPLSEqnParams());
    }
}
/* * Copyright (c) 2015, Turn Inc. All Rights Reserved. * Use of this source code is governed by a BSD-style license that can be found * in the LICENSE file. */ package com.turn.sorcerer.status; import com.turn.sorcerer.injector.SorcererInjector; import com.turn.sorcerer.pipeline.type.PipelineType; import com.turn.sorcerer.task.type.TaskType; import java.io.IOException; import org.apache.commons.lang.exception.ExceptionUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.joda.time.DateTime; /** * Class Description Here * * @author tshiou */ public class StatusManager { private static final Logger logger = LoggerFactory.getLogger(StatusManager.class); private static final StatusManager INSTANCE= new StatusManager(); public static StatusManager get() { return INSTANCE; } StatusStorage taskStorage = SorcererInjector.get().getStorageInstance().setType("tasks"); StatusStorage pipelineStorage = SorcererInjector.get().getStorageInstance().setType("pipelines"); private StatusManager() { } public boolean initialized() { try { taskStorage.init(); pipelineStorage.init(); } catch (IOException e) { logger.debug("Storage layer could not initialize", e); return false; } return true; } public void commitTaskStatus(TaskType type, int seq, Status status) { commitTaskStatus(type.getName(), seq, status, DateTime.now(), false); } public void commitTaskStatus(TaskType type, int seq, Status status, boolean overwrite) { commitTaskStatus(type.getName(), seq, status, DateTime.now(), overwrite); } private void commitTaskStatus( String taskName, int seq, Status status, DateTime time, boolean overwrite) { if (Status.PENDING.equals(status)) { try { taskStorage.clearAllStatuses(taskName, seq); } catch (IOException e) { logger.error("Storage layer unreachable!", e); } return; } try { taskStorage.commitStatus(taskName, seq, status, time, overwrite); } catch (IOException e) { logger.error(ExceptionUtils.getStackTrace(e)); } } public Status checkTaskStatus(String taskName, int 
seq) { try { return taskStorage.checkStatus(taskName, seq); } catch (IOException e) { logger.error("Storage layer unreachable!", e); } return Status.PENDING; } public Status checkPipelineStatus(String pipelineName, int seq) { try { return pipelineStorage.checkStatus(pipelineName, seq); } catch (IOException e) { logger.error("Storage layer unreachable!", e); } return Status.PENDING; } public void removeInProgressTaskStatus(TaskType type, int seq) { removeInProgressTaskStatus(type.getName(), seq); } private void removeInProgressTaskStatus(String taskName, int seq) { try { taskStorage.removeStatus(taskName, seq, Status.IN_PROGRESS); } catch (IOException e) { logger.error("Storage layer unreachable!", e); } } public boolean isTaskComplete(TaskType type, int seq) { try { return Status.SUCCESS.equals(taskStorage.checkStatus(type.getName(), seq)); } catch (IOException e) { logger.error("Storage layer unreachable!", e); return false; } } public boolean isTaskRunning(TaskType type, int seq) { try { return Status.IN_PROGRESS.equals(taskStorage.checkStatus(type.getName(), seq)); } catch (IOException e) { logger.error("Storage layer unreachable!", e); return false; } } public boolean isTaskInError(TaskType type, int seq) { try { return Status.ERROR.equals(taskStorage.checkStatus(type.getName(), seq)); } catch (IOException e) { logger.error("Storage layer unreachable!", e); return false; } } public void clearTaskStatus(TaskType type, int seq) { try { taskStorage.clearAllStatuses(type.getName(), seq); } catch (IOException e) { logger.error("Storage layer unreachable!", e); } } public DateTime getTaskLastSuccessTime(TaskType type, int seq) { return getTaskLastUpdateTimeForStatus(type, seq, Status.SUCCESS); } public DateTime getTaskLastSuccessTime(String taskName, int seq) { try { return taskStorage.getStatusUpdateTime(taskName, seq, Status.SUCCESS); } catch (IOException e) { logger.error("Storage layer unreachable!", e); return null; } } private DateTime 
getTaskLastUpdateTimeForStatus(TaskType type, int seq, Status status) { try { return taskStorage.getStatusUpdateTime(type.getName(), seq, status); } catch (IOException e) { logger.error("Storage layer unreachable!", e); return null; } } public int getCurrentIterationNumberForPipeline(PipelineType type) { return getCurrentIterationNumberForPipeline(type.getName()); } public int getCurrentIterationNumberForPipeline(String pipelineName) { try { return pipelineStorage.getCurrentIterNo(pipelineName); } catch (IOException e) { logger.error("Storage layer unreachable!", e); } return 0; } public void commitPipelineStatus(PipelineType type, int seq, Status status) { commitPipelineStatus(type.getName(), seq, status, DateTime.now(), false); } public void commitPipelineStatus(PipelineType type, int seq, Status status, boolean overwrite) { commitPipelineStatus(type.getName(), seq, status, DateTime.now(), overwrite); } private void commitPipelineStatus(String taskName, int seq, Status status, DateTime time, boolean overwrite) { try { pipelineStorage.commitStatus(taskName, seq, status, time, overwrite); } catch (IOException e) { logger.error("Storage layer unreachable!", e); } } public boolean isPipelineComplete(PipelineType type, int seq) { try { return Status.SUCCESS.equals(pipelineStorage.checkStatus(type.getName(), seq)); } catch (IOException e) { logger.error("Storage layer unreachable!", e); return false; } } public void clearPipelineStatus(PipelineType type, int seq) { try { pipelineStorage.clearAllStatuses(type.getName(), seq); } catch (IOException e) { logger.error("Storage layer unreachable!", e); } } }
package org.workcraft.plugins.stg.commands; import java.awt.geom.AffineTransform; import java.awt.geom.Point2D; import java.util.Collection; import java.util.HashSet; import java.util.LinkedList; import org.workcraft.NodeTransformer; import org.workcraft.commands.AbstractTransformationCommand; import org.workcraft.dom.Connection; import org.workcraft.dom.Container; import org.workcraft.dom.Model; import org.workcraft.dom.Node; import org.workcraft.dom.visual.MixUtils; import org.workcraft.dom.visual.TransformHelper; import org.workcraft.dom.visual.connections.VisualConnection; import org.workcraft.exceptions.InvalidConnectionException; import org.workcraft.plugins.petri.VisualReadArc; import org.workcraft.plugins.stg.SignalTransition.Direction; import org.workcraft.plugins.stg.SignalTransition.Type; import org.workcraft.plugins.stg.VisualSignalTransition; import org.workcraft.plugins.stg.VisualStg; import org.workcraft.util.DialogUtils; import org.workcraft.util.Hierarchy; import org.workcraft.util.LogUtils; import org.workcraft.util.Pair; import org.workcraft.workspace.ModelEntry; import org.workcraft.workspace.WorkspaceEntry; import org.workcraft.workspace.WorkspaceUtils; public class ExpandHandshakeTransformationCommand extends AbstractTransformationCommand implements NodeTransformer { private static final String SUFFIX_REQ = "_req"; private static final String SUFFIX_ACK = "_ack"; private HashSet<Node> expandedNodes = null; private Pair<String, String> suffixPair = null; @Override public String getDisplayName() { return "Expand selected handshake transitions..."; } @Override public String getPopupName() { return "Expand handshake transition..."; } @Override public boolean isApplicableTo(WorkspaceEntry we) { return WorkspaceUtils.isApplicable(we, VisualStg.class); } @Override public boolean isApplicableTo(Node node) { return node instanceof VisualSignalTransition; } @Override public boolean isEnabled(ModelEntry me, Node node) { return true; } @Override public 
Position getPosition() { return null; } @Override public Collection<Node> collect(Model model) { Collection<Node> signalTransitions = new HashSet<>(); if (model instanceof VisualStg) { VisualStg stg = (VisualStg) model; signalTransitions.addAll(stg.getVisualSignalTransitions()); signalTransitions.retainAll(stg.getSelection()); } return signalTransitions; } @Override public void transform(Model model, Collection<Node> nodes) { if (model instanceof VisualStg) { VisualStg stg = (VisualStg) model; expandedNodes = new HashSet<Node>(); suffixPair = getSufixes(); for (Node node: nodes) { transform(model, node); } stg.select(expandedNodes); expandedNodes = null; suffixPair = null; } } @Override public void transform(Model model, Node node) { if ((model instanceof VisualStg) && (node instanceof VisualSignalTransition)) { VisualStg stg = (VisualStg) model; VisualSignalTransition transition = (VisualSignalTransition) node; String ref = stg.getSignalReference(transition); Direction direction = transition.getDirection(); Container container = Hierarchy.getNearestContainer(transition); String reqSuffix = SUFFIX_REQ; String ackSuffix = SUFFIX_ACK; if (suffixPair != null) { reqSuffix = suffixPair.getFirst(); ackSuffix = suffixPair.getSecond(); } VisualSignalTransition reqTransition = stg.createVisualSignalTransition(ref + reqSuffix, Type.OUTPUT, direction, container); VisualSignalTransition ackTransition = stg.createVisualSignalTransition(ref + ackSuffix, Type.INPUT, direction, container); Pair<Point2D, Point2D> positionPair = getReqAckPositions(stg, transition); reqTransition.setRootSpacePosition(positionPair.getFirst()); ackTransition.setRootSpacePosition(positionPair.getSecond()); VisualConnection midConnection = null; try { midConnection = stg.connect(reqTransition, ackTransition); for (Connection connection: stg.getConnections(transition)) { Node predNode = connection.getFirst(); Node succNode = connection.getSecond(); if (connection instanceof VisualReadArc) { String predRef 
= stg.getNodeMathReference(predNode); String succRef = stg.getNodeMathReference(succNode); LogUtils.logWarning("Read-arc between '" + predRef + "' and '" + succRef + "' is ignored."); continue; } if (transition == succNode) { VisualConnection predConnection = stg.connect(predNode, reqTransition); predConnection.copyShape((VisualConnection) connection); predConnection.copyStyle((VisualConnection) connection); } if (transition == predNode) { VisualConnection succConnection = stg.connect(ackTransition, succNode); succConnection.copyStyle((VisualConnection) connection); succConnection.copyShape((VisualConnection) connection); } } } catch (InvalidConnectionException e) { } if (expandedNodes == null) { expandedNodes.add(reqTransition); expandedNodes.add(ackTransition); if (midConnection != null) { expandedNodes.add(midConnection); } } stg.remove(transition); } } private Pair<Point2D, Point2D> getReqAckPositions(VisualStg stg, VisualSignalTransition transition) { LinkedList<Point2D> predPoints = new LinkedList<>(); LinkedList<Point2D> succPoints = new LinkedList<>(); for (Connection connection: stg.getConnections(transition)) { if (connection instanceof VisualConnection) { VisualConnection visualConnection = (VisualConnection) connection; AffineTransform localToRootTransform = TransformHelper.getTransformToRoot(visualConnection); if (connection.getFirst() == transition) { Point2D posInLocalSpace = visualConnection.getPointOnConnection(1.0 / 3.0); Point2D posInRootSpace = localToRootTransform.transform(posInLocalSpace, null); succPoints.add(posInRootSpace); } if (connection.getSecond() == transition) { Point2D posInLocalSpace = visualConnection.getPointOnConnection(2.0 / 3.0); Point2D posInRootSpace = localToRootTransform.transform(posInLocalSpace, null); predPoints.add(posInRootSpace); } } } Point2D pos = transition.getRootSpacePosition(); if (predPoints.isEmpty() && succPoints.isEmpty()) { predPoints.add(new Point2D.Double(pos.getX(), pos.getY() - 1.0)); 
succPoints.add(new Point2D.Double(pos.getX(), pos.getY() + 1.0)); } else if (predPoints.isEmpty()) { Point2D succPoint = MixUtils.middlePoint(succPoints); predPoints.add(new Point2D.Double(2.0 * pos.getX() - succPoint.getX(), 2.0 * pos.getY() - succPoint.getY())); } else if (succPoints.isEmpty()) { Point2D predPoint = MixUtils.middlePoint(predPoints); succPoints.add(new Point2D.Double(2.0 * pos.getX() - predPoint.getX(), 2.0 * pos.getY() - predPoint.getY())); } Point2D predPoint = MixUtils.middlePoint(predPoints); Point2D succPoint = MixUtils.middlePoint(succPoints); return new Pair<Point2D, Point2D>(predPoint, succPoint); } public Pair<String, String> getSufixes() { Pair<String, String> result = null; String ans = DialogUtils.showInput("Enter a pair of space-separated suffixes for handshake signals:", SUFFIX_REQ + " " + SUFFIX_ACK); if (ans != null) { String[] split = ans.trim().split("\\s"); if (split.length == 2) { result = Pair.of(split[0], split[1]); } else { DialogUtils.showError("Two suffixes are required for handshake expansion.\n\n" + "Default suffixes " + SUFFIX_REQ + " and " + SUFFIX_ACK + " will be used."); result = null; } } return result; } }
package com.algorithm.graph;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Simple adjacency-list graph keyed by {@code long} vertex ids.
 * Vertices are created lazily by {@link #addEdge(long, long, int)};
 * for an undirected graph each edge is registered on both endpoints
 * but stored only once in {@link #getAllEdges()}.
 *
 * @param <T> type of the payload attached to each vertex
 */
public class Graph<T> {

    private List<Edge<T>> allEdges;
    private Map<Long, Vertex<T>> allVertex;
    boolean isDirected = false;

    public Graph(boolean isDirected) {
        allEdges = new ArrayList<Edge<T>>();
        allVertex = new HashMap<Long, Vertex<T>>();
        this.isDirected = isDirected;
    }

    /** Adds an edge with default weight 0 between the vertices with the given ids. */
    public void addEdge(long id1, long id2) {
        addEdge(id1, id2, 0);
    }

    // This works only for directed graph because for undirected graph we can
    // end up adding edges two times to allEdges
    public void addVertex(Vertex<T> vertex) {
        if (allVertex.containsKey(vertex.getId())) {
            return;
        }
        allVertex.put(vertex.getId(), vertex);
        for (Edge<T> edge : vertex.getEdges()) {
            allEdges.add(edge);
        }
    }

    /** Returns the vertex with the given id, creating an isolated one if absent. */
    public Vertex<T> addSingleVertex(long id) {
        if (allVertex.containsKey(id)) {
            return allVertex.get(id);
        }
        Vertex<T> v = new Vertex<T>(id);
        allVertex.put(id, v);
        return v;
    }

    /** Returns the vertex with the given id, or {@code null} if it does not exist. */
    public Vertex<T> getVertex(long id) {
        return allVertex.get(id);
    }

    /**
     * Adds a weighted edge between the vertices with the given ids, creating
     * missing endpoints on the fly. For an undirected graph the edge is
     * registered on both endpoints' adjacency lists.
     */
    public void addEdge(long id1, long id2, int weight) {
        Vertex<T> vertex1 = null;
        if (allVertex.containsKey(id1)) {
            vertex1 = allVertex.get(id1);
        } else {
            vertex1 = new Vertex<T>(id1);
            allVertex.put(id1, vertex1);
        }
        Vertex<T> vertex2 = null;
        if (allVertex.containsKey(id2)) {
            vertex2 = allVertex.get(id2);
        } else {
            vertex2 = new Vertex<T>(id2);
            allVertex.put(id2, vertex2);
        }
        Edge<T> edge = new Edge<T>(vertex1, vertex2, isDirected, weight);
        allEdges.add(edge);
        vertex1.addAdjacentVertex(edge, vertex2);
        if (!isDirected) {
            vertex2.addAdjacentVertex(edge, vertex1);
        }
    }

    public List<Edge<T>> getAllEdges() {
        return allEdges;
    }

    public Collection<Vertex<T>> getAllVertex() {
        return allVertex.values();
    }

    /** Attaches payload data to an existing vertex; silently ignores unknown ids. */
    public void setDataForVertex(long id, T data) {
        if (allVertex.containsKey(id)) {
            Vertex<T> vertex = allVertex.get(id);
            vertex.setData(data);
        }
    }

    @Override
    public String toString() {
        // StringBuilder instead of StringBuffer: this is single-threaded use,
        // so the synchronized StringBuffer buys nothing.
        StringBuilder buffer = new StringBuilder();
        for (Edge<T> edge : getAllEdges()) {
            buffer.append(edge.getVertex1() + " " + edge.getVertex2() + " " + edge.getWeight());
            buffer.append("\n");
        }
        return buffer.toString();
    }
}

/**
 * A graph vertex: an id, optional payload, and parallel lists of incident
 * edges and adjacent vertices (index i of both lists describes the same arc).
 */
class Vertex<T> {
    long id;
    private T data;
    private List<Edge<T>> edges = new ArrayList<Edge<T>>();
    private List<Vertex<T>> adjacentVertex = new ArrayList<Vertex<T>>();

    Vertex(long id) {
        this.id = id;
    }

    public long getId() {
        return id;
    }

    public void setData(T data) {
        this.data = data;
    }

    public T getData() {
        return data;
    }

    public void addAdjacentVertex(Edge<T> e, Vertex<T> v) {
        edges.add(e);
        adjacentVertex.add(v);
    }

    @Override
    public String toString() {
        return String.valueOf(id);
    }

    public List<Vertex<T>> getAdjacentVertexes() {
        return adjacentVertex;
    }

    public List<Edge<T>> getEdges() {
        return edges;
    }

    public int getDegree() {
        return edges.size();
    }

    // Identity is the id alone; data and adjacency are deliberately excluded.
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + (int) (id ^ (id >>> 32));
        return result;
    }

    @SuppressWarnings("rawtypes")
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        Vertex other = (Vertex) obj;
        if (id != other.id)
            return false;
        return true;
    }
}

/**
 * An edge between two vertices with an optional weight and direction flag.
 * Equality is based on the endpoint pair only (weight/direction excluded),
 * consistently with {@link #hashCode()}.
 */
class Edge<T> {
    private boolean isDirected = false;
    private Vertex<T> vertex1;
    private Vertex<T> vertex2;
    private int weight;

    Edge(Vertex<T> vertex1, Vertex<T> vertex2) {
        this.vertex1 = vertex1;
        this.vertex2 = vertex2;
    }

    Edge(Vertex<T> vertex1, Vertex<T> vertex2, boolean isDirected, int weight) {
        this.vertex1 = vertex1;
        this.vertex2 = vertex2;
        this.weight = weight;
        this.isDirected = isDirected;
    }

    Edge(Vertex<T> vertex1, Vertex<T> vertex2, boolean isDirected) {
        this.vertex1 = vertex1;
        this.vertex2 = vertex2;
        this.isDirected = isDirected;
    }

    Vertex<T> getVertex1() {
        return vertex1;
    }

    Vertex<T> getVertex2() {
        return vertex2;
    }

    int getWeight() {
        return weight;
    }

    public boolean isDirected() {
        return isDirected;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((vertex1 == null) ? 0 : vertex1.hashCode());
        result = prime * result + ((vertex2 == null) ? 0 : vertex2.hashCode());
        return result;
    }

    @SuppressWarnings("rawtypes")
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        Edge other = (Edge) obj;
        if (vertex1 == null) {
            if (other.vertex1 != null)
                return false;
        } else if (!vertex1.equals(other.vertex1))
            return false;
        if (vertex2 == null) {
            if (other.vertex2 != null)
                return false;
        } else if (!vertex2.equals(other.vertex2))
            return false;
        return true;
    }

    @Override
    public String toString() {
        return "Edge [isDirected=" + isDirected + ", vertex1=" + vertex1
                + ", vertex2=" + vertex2 + ", weight=" + weight + "]";
    }
}
package se.l4.crayon.http.servlet.internal; import javax.servlet.DispatcherType; import javax.servlet.Filter; import javax.servlet.Servlet; import com.google.inject.Injector; import com.google.inject.Provider; import org.eclipse.collections.api.factory.Lists; import org.eclipse.collections.api.factory.Maps; import org.eclipse.collections.api.list.ImmutableList; import org.eclipse.collections.api.list.MutableList; import org.eclipse.collections.api.map.ImmutableMap; import org.eclipse.collections.api.map.MapIterable; import se.l4.crayon.http.servlet.ServletBinder; import se.l4.crayon.http.servlet.ServletConfiguration; import se.l4.crayon.http.servlet.ServletConfiguration.BoundFilter; import se.l4.crayon.http.servlet.ServletConfiguration.BoundServlet; public class ServletBinderImpl implements ServletBinder { private final Injector injector; private final MutableList<BoundServlet> servlets; private final MutableList<BoundFilter> filters; public ServletBinderImpl(Injector injector) { this.injector = injector; servlets = Lists.mutable.empty(); filters = Lists.mutable.empty(); } @Override public FilterBuilder filter(String... path) { return filter(Lists.immutable.of(path)); } @Override public FilterBuilder filter(Iterable<String> paths) { return new FilterBuilderImpl( Lists.immutable.withAll(paths), Maps.immutable.empty(), false, new DispatcherType[] { DispatcherType.REQUEST } ); } @Override public ServletBuilder serve(String... 
paths) { return serve(Lists.immutable.of(paths)); } @Override public ServletBuilder serve(Iterable<String> paths) { return new ServletBuilderImpl( Lists.immutable.withAll(paths), Maps.immutable.empty(), false ); } public ServletConfiguration toConfig() { return new ServletConfigurationImpl( filters.toImmutable(), servlets.toImmutable() ); } private class ServletBuilderImpl implements ServletBuilder { private final ImmutableList<String> path; private final ImmutableMap<String, String> initParams; private final boolean asyncSupported; public ServletBuilderImpl( ImmutableList<String> path, ImmutableMap<String, String> initParams, boolean asyncSupported ) { this.path = path; this.asyncSupported = asyncSupported; this.initParams = initParams; } @Override public ServletBuilder param(String key, String value) { return new ServletBuilderImpl( path, initParams.newWithKeyValue(key, value), asyncSupported ); } @Override public ServletBuilder params(MapIterable<String, String> params) { return new ServletBuilderImpl( path, initParams.newWithAllKeyValues(params.keyValuesView()), asyncSupported ); } @Override public ServletBuilder asyncSupported() { return new ServletBuilderImpl( path, initParams, true ); } @Override public ServletBuilder asyncSupported(boolean supported) { return new ServletBuilderImpl( path, initParams, supported ); } @Override @SuppressWarnings({ "unchecked", "rawtypes" }) public void with(Class<? extends Servlet> type) { Provider<? 
extends Servlet> provider = injector.getProvider(type); with((Class) type, provider); } @Override @SuppressWarnings({ "unchecked", "rawtypes" }) public void with(Servlet instance) { with((Class) instance.getClass(), () -> instance); } @Override public <T extends Servlet> void with(Class<T> type, Provider<T> provider) { servlets.add(new BoundServletImpl( type.getSimpleName() + servlets.size(), type, provider, path, initParams, asyncSupported )); } } private class FilterBuilderImpl implements FilterBuilder { private final ImmutableList<String> path; private final ImmutableMap<String, String> initParams; private final boolean asyncSupported; private final DispatcherType[] dispatcherTypes; public FilterBuilderImpl( ImmutableList<String> path, ImmutableMap<String, String> initParams, boolean asyncSupported, DispatcherType[] dispatcherTypes ) { this.path = path; this.asyncSupported = asyncSupported; this.initParams = initParams; this.dispatcherTypes = dispatcherTypes; } @Override public FilterBuilder param(String key, String value) { return new FilterBuilderImpl( path, initParams.newWithKeyValue(key, value), asyncSupported, dispatcherTypes ); } @Override public FilterBuilder params(MapIterable<String, String> params) { return new FilterBuilderImpl( path, initParams.newWithAllKeyValues(params.keyValuesView()), asyncSupported, dispatcherTypes ); } @Override public FilterBuilder asyncSupported() { return new FilterBuilderImpl( path, initParams, true, dispatcherTypes ); } @Override public FilterBuilder asyncSupported(boolean supported) { return new FilterBuilderImpl( path, initParams, supported, dispatcherTypes ); } @Override public FilterBuilder dispatcherTypes(DispatcherType... types) { return new FilterBuilderImpl( path, initParams, asyncSupported, types ); } @Override @SuppressWarnings({ "unchecked", "rawtypes" }) public void with(Class<? extends Filter> type) { Provider<? 
extends Filter> provider = injector.getProvider(type); with((Class) type, provider); } @Override @SuppressWarnings({ "unchecked", "rawtypes" }) public void with(Filter instance) { with((Class) instance.getClass(), () -> instance); } @Override public <T extends Filter> void with(Class<T> type, Provider<T> provider) { filters.add(new BoundFilterImpl( type.getSimpleName() + servlets.size(), type, provider, path, initParams, asyncSupported, dispatcherTypes )); } } }
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver13;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

// NOTE(review): generated code — only comments have been added below; any real
// change belongs in the LoxiGen template, not in this file.
// Immutable OpenFlow 1.3 experimenter message (BSN "table set buckets size"),
// fixed wire length of 24 bytes.
class OFBsnTableSetBucketsSizeVer13 implements OFBsnTableSetBucketsSize {
    private static final Logger logger = LoggerFactory.getLogger(OFBsnTableSetBucketsSizeVer13.class);
    // version: 1.3
    final static byte WIRE_VERSION = 4;
    final static int LENGTH = 24;

    private final static long DEFAULT_XID = 0x0L;
    private final static TableId DEFAULT_TABLE_ID = TableId.ALL;
    private final static long DEFAULT_BUCKETS_SIZE = 0x0L;

    // OF message fields
    private final long xid;
    private final TableId tableId;
    private final long bucketsSize;
//
    // Immutable default instance
    final static OFBsnTableSetBucketsSizeVer13 DEFAULT = new OFBsnTableSetBucketsSizeVer13(
        DEFAULT_XID, DEFAULT_TABLE_ID, DEFAULT_BUCKETS_SIZE
    );

    // package private constructor - used by readers, builders, and factory
    OFBsnTableSetBucketsSizeVer13(long xid, TableId tableId, long bucketsSize) {
        if(tableId == null) {
            throw new NullPointerException("OFBsnTableSetBucketsSizeVer13: property tableId cannot be null");
        }
        // U32.normalize clamps the values to unsigned 32-bit range.
        this.xid = U32.normalize(xid);
        this.tableId = tableId;
        this.bucketsSize = U32.normalize(bucketsSize);
    }

    // Accessors for OF message fields
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_13;
    }

    @Override
    public OFType getType() {
        return OFType.EXPERIMENTER;
    }

    @Override
    public long getXid() {
        return xid;
    }

    @Override
    public long getExperimenter() {
        return 0x5c16c7L;
    }

    @Override
    public long getSubtype() {
        return 0x3dL;
    }

    @Override
    public TableId getTableId() {
        return tableId;
    }

    @Override
    public long getBucketsSize() {
        return bucketsSize;
    }

    public OFBsnTableSetBucketsSize.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder that inherits every unset field from an existing message.
    static class BuilderWithParent implements OFBsnTableSetBucketsSize.Builder {
        final OFBsnTableSetBucketsSizeVer13 parentMessage;

        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean tableIdSet;
        private TableId tableId;
        private boolean bucketsSizeSet;
        private long bucketsSize;

        BuilderWithParent(OFBsnTableSetBucketsSizeVer13 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_13;
        }

        @Override
        public OFType getType() {
            return OFType.EXPERIMENTER;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFBsnTableSetBucketsSize.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public long getExperimenter() {
            return 0x5c16c7L;
        }

        @Override
        public long getSubtype() {
            return 0x3dL;
        }

        @Override
        public TableId getTableId() {
            return tableId;
        }

        @Override
        public OFBsnTableSetBucketsSize.Builder setTableId(TableId tableId) {
            this.tableId = tableId;
            this.tableIdSet = true;
            return this;
        }

        @Override
        public long getBucketsSize() {
            return bucketsSize;
        }

        @Override
        public OFBsnTableSetBucketsSize.Builder setBucketsSize(long bucketsSize) {
            this.bucketsSize = bucketsSize;
            this.bucketsSizeSet = true;
            return this;
        }

        @Override
        public OFBsnTableSetBucketsSize build() {
            long xid = this.xidSet ? this.xid : parentMessage.xid;
            TableId tableId = this.tableIdSet ? this.tableId : parentMessage.tableId;
            if(tableId == null)
                throw new NullPointerException("Property tableId must not be null");
            long bucketsSize = this.bucketsSizeSet ? this.bucketsSize : parentMessage.bucketsSize;
//
            return new OFBsnTableSetBucketsSizeVer13(
                xid,
                tableId,
                bucketsSize
            );
        }
    }

    // Stand-alone builder; unset fields fall back to the DEFAULT_* constants.
    static class Builder implements OFBsnTableSetBucketsSize.Builder {
        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean tableIdSet;
        private TableId tableId;
        private boolean bucketsSizeSet;
        private long bucketsSize;

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_13;
        }

        @Override
        public OFType getType() {
            return OFType.EXPERIMENTER;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFBsnTableSetBucketsSize.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public long getExperimenter() {
            return 0x5c16c7L;
        }

        @Override
        public long getSubtype() {
            return 0x3dL;
        }

        @Override
        public TableId getTableId() {
            return tableId;
        }

        @Override
        public OFBsnTableSetBucketsSize.Builder setTableId(TableId tableId) {
            this.tableId = tableId;
            this.tableIdSet = true;
            return this;
        }

        @Override
        public long getBucketsSize() {
            return bucketsSize;
        }

        @Override
        public OFBsnTableSetBucketsSize.Builder setBucketsSize(long bucketsSize) {
            this.bucketsSize = bucketsSize;
            this.bucketsSizeSet = true;
            return this;
        }
//
        @Override
        public OFBsnTableSetBucketsSize build() {
            long xid = this.xidSet ? this.xid : DEFAULT_XID;
            TableId tableId = this.tableIdSet ? this.tableId : DEFAULT_TABLE_ID;
            if(tableId == null)
                throw new NullPointerException("Property tableId must not be null");
            long bucketsSize = this.bucketsSizeSet ? this.bucketsSize : DEFAULT_BUCKETS_SIZE;

            return new OFBsnTableSetBucketsSizeVer13(
                xid,
                tableId,
                bucketsSize
            );
        }
    }

    final static Reader READER = new Reader();

    // Deserializer; validates every fixed-value header field against the wire.
    static class Reader implements OFMessageReader<OFBsnTableSetBucketsSize> {
        @Override
        public OFBsnTableSetBucketsSize readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property version == 4
            byte version = bb.readByte();
            if(version != (byte) 0x4)
                throw new OFParseError("Wrong version: Expected=OFVersion.OF_13(4), got="+version);
            // fixed value property type == 4
            byte type = bb.readByte();
            if(type != (byte) 0x4)
                throw new OFParseError("Wrong type: Expected=OFType.EXPERIMENTER(4), got="+type);
            int length = U16.f(bb.readShort());
            if(length != 24)
                throw new OFParseError("Wrong length: Expected=24(24), got="+length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long xid = U32.f(bb.readInt());
            // fixed value property experimenter == 0x5c16c7L
            int experimenter = bb.readInt();
            if(experimenter != 0x5c16c7)
                throw new OFParseError("Wrong experimenter: Expected=0x5c16c7L(0x5c16c7L), got="+experimenter);
            // fixed value property subtype == 0x3dL
            int subtype = bb.readInt();
            if(subtype != 0x3d)
                throw new OFParseError("Wrong subtype: Expected=0x3dL(0x3dL), got="+subtype);
            // pad: 1 bytes
            bb.skipBytes(1);
            TableId tableId = TableId.readByte(bb);
            // pad: 2 bytes
            bb.skipBytes(2);
            long bucketsSize = U32.f(bb.readInt());

            OFBsnTableSetBucketsSizeVer13 bsnTableSetBucketsSizeVer13 = new OFBsnTableSetBucketsSizeVer13(
                    xid,
                      tableId,
                      bucketsSize
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", bsnTableSetBucketsSizeVer13);
            return bsnTableSetBucketsSizeVer13;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFBsnTableSetBucketsSizeVer13Funnel FUNNEL = new OFBsnTableSetBucketsSizeVer13Funnel();

    // Guava Funnel mirroring the wire layout (for hashing/fingerprinting).
    static class OFBsnTableSetBucketsSizeVer13Funnel implements Funnel<OFBsnTableSetBucketsSizeVer13> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFBsnTableSetBucketsSizeVer13 message, PrimitiveSink sink) {
            // fixed value property version = 4
            sink.putByte((byte) 0x4);
            // fixed value property type = 4
            sink.putByte((byte) 0x4);
            // fixed value property length = 24
            sink.putShort((short) 0x18);
            sink.putLong(message.xid);
            // fixed value property experimenter = 0x5c16c7L
            sink.putInt(0x5c16c7);
            // fixed value property subtype = 0x3dL
            sink.putInt(0x3d);
            // skip pad (1 bytes)
            message.tableId.putTo(sink);
            // skip pad (2 bytes)
            sink.putLong(message.bucketsSize);
        }
    }

    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    // Serializer; writes the fixed 24-byte wire image of this message.
    static class Writer implements OFMessageWriter<OFBsnTableSetBucketsSizeVer13> {
        @Override
        public void write(ByteBuf bb, OFBsnTableSetBucketsSizeVer13 message) {
            // fixed value property version = 4
            bb.writeByte((byte) 0x4);
            // fixed value property type = 4
            bb.writeByte((byte) 0x4);
            // fixed value property length = 24
            bb.writeShort((short) 0x18);
            bb.writeInt(U32.t(message.xid));
            // fixed value property experimenter = 0x5c16c7L
            bb.writeInt(0x5c16c7);
            // fixed value property subtype = 0x3dL
            bb.writeInt(0x3d);
            // pad: 1 bytes
            bb.writeZero(1);
            message.tableId.writeByte(bb);
            // pad: 2 bytes
            bb.writeZero(2);
            bb.writeInt(U32.t(message.bucketsSize));
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFBsnTableSetBucketsSizeVer13(");
        b.append("xid=").append(xid);
        b.append(", ");
        b.append("tableId=").append(tableId);
        b.append(", ");
        b.append("bucketsSize=").append(bucketsSize);
        b.append(")");
return b.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; OFBsnTableSetBucketsSizeVer13 other = (OFBsnTableSetBucketsSizeVer13) obj; if( xid != other.xid) return false; if (tableId == null) { if (other.tableId != null) return false; } else if (!tableId.equals(other.tableId)) return false; if( bucketsSize != other.bucketsSize) return false; return true; } @Override public boolean equalsIgnoreXid(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; OFBsnTableSetBucketsSizeVer13 other = (OFBsnTableSetBucketsSizeVer13) obj; // ignore XID if (tableId == null) { if (other.tableId != null) return false; } else if (!tableId.equals(other.tableId)) return false; if( bucketsSize != other.bucketsSize) return false; return true; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * (int) (xid ^ (xid >>> 32)); result = prime * result + ((tableId == null) ? 0 : tableId.hashCode()); result = prime * (int) (bucketsSize ^ (bucketsSize >>> 32)); return result; } @Override public int hashCodeIgnoreXid() { final int prime = 31; int result = 1; // ignore XID result = prime * result + ((tableId == null) ? 0 : tableId.hashCode()); result = prime * (int) (bucketsSize ^ (bucketsSize >>> 32)); return result; } }
/* * ==================================================================== * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. 
 *
 */
package org.apache.hc.client5.http.impl.cache;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import org.apache.hc.client5.http.cache.HttpCacheEntry;
import org.apache.hc.client5.http.classic.methods.HttpGet;
import org.apache.hc.core5.http.Header;
import org.apache.hc.core5.http.HttpHost;
import org.apache.hc.core5.http.HttpRequest;
import org.apache.hc.core5.http.message.BasicHeader;
import org.apache.hc.core5.http.message.BasicHeaderIterator;
import org.apache.hc.core5.http.message.BasicHttpRequest;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

/**
 * Unit tests for {@code CacheKeyGenerator}: the canonical cache-key string
 * produced from a host/request pair, and the variant-key prefix derived from
 * a cached entry's Vary headers.
 */
@SuppressWarnings({"boxing","static-access"}) // this is test code
public class TestCacheKeyGenerator {

    private static final BasicHttpRequest REQUEST_FULL_EPISODES = new BasicHttpRequest("GET",
            "/full_episodes");
    private static final BasicHttpRequest REQUEST_ROOT = new BasicHttpRequest("GET",
            "/");

    private CacheKeyGenerator extractor;
    private HttpHost defaultHost;
    private HttpCacheEntry mockEntry;
    private HttpRequest mockRequest;

    @Before
    public void setUp() throws Exception {
        defaultHost = new HttpHost("foo.example.com");
        mockEntry = mock(HttpCacheEntry.class);
        mockRequest = mock(HttpRequest.class);
        extractor = CacheKeyGenerator.INSTANCE;
    }

    // An absolute request URI takes precedence over the supplied target host.
    @Test
    public void testExtractsUriFromAbsoluteUriInRequest() {
        final HttpHost host = new HttpHost("bar.example.com");
        final HttpRequest req = new HttpGet("http://foo.example.com/");
        Assert.assertEquals("http://foo.example.com:80/", extractor.generateKey(host, req));
    }

    @Test
    public void testGetURIWithDefaultPortAndScheme() {
        Assert.assertEquals("http://www.comcast.net:80/", extractor.generateKey(new HttpHost(
                "www.comcast.net"), REQUEST_ROOT));

        Assert.assertEquals("http://www.fancast.com:80/full_episodes", extractor.generateKey(new HttpHost(
                "www.fancast.com"), REQUEST_FULL_EPISODES));
    }

    @Test
    public void testGetURIWithDifferentScheme() {
        Assert.assertEquals("https://www.comcast.net:443/", extractor.generateKey(new HttpHost(
                "www.comcast.net", -1, "https"), REQUEST_ROOT));

        Assert.assertEquals("myhttp://www.fancast.com/full_episodes", extractor.generateKey(
                new HttpHost("www.fancast.com", -1, "myhttp"), REQUEST_FULL_EPISODES));
    }

    @Test
    public void testGetURIWithDifferentPort() {
        Assert.assertEquals("http://www.comcast.net:8080/", extractor.generateKey(new HttpHost(
                "www.comcast.net", 8080), REQUEST_ROOT));

        Assert.assertEquals("http://www.fancast.com:9999/full_episodes", extractor.generateKey(
                new HttpHost("www.fancast.com", 9999), REQUEST_FULL_EPISODES));
    }

    @Test
    public void testGetURIWithDifferentPortAndScheme() {
        Assert.assertEquals("https://www.comcast.net:8080/", extractor.generateKey(new HttpHost(
                "www.comcast.net", 8080, "https"), REQUEST_ROOT));

        Assert.assertEquals("myhttp://www.fancast.com:9999/full_episodes", extractor.generateKey(
                new HttpHost("www.fancast.com", 9999, "myhttp"), REQUEST_FULL_EPISODES));
    }

    @Test
    public void testGetURIWithQueryParameters() {
        Assert.assertEquals("http://www.comcast.net:80/?foo=bar", extractor.generateKey(new HttpHost(
                "www.comcast.net", -1, "http"), new BasicHttpRequest("GET", "/?foo=bar")));
        Assert.assertEquals("http://www.fancast.com:80/full_episodes?foo=bar", extractor.generateKey(
                new HttpHost("www.fancast.com", -1, "http"), new BasicHttpRequest("GET",
                        "/full_episodes?foo=bar")));
    }

    // The tests below override generateKey(host, request) in an anonymous
    // subclass so that only the variant-prefix logic of
    // generateKey(host, request, entry) is exercised.

    @Test
    public void testGetVariantURIWithNoVaryHeaderReturnsNormalURI() {
        final String theURI = "theURI";
        when(mockEntry.hasVariants()).thenReturn(false);
        extractor = new CacheKeyGenerator() {
            @Override
            public String generateKey(final HttpHost h, final HttpRequest request) {
                Assert.assertSame(defaultHost, h);
                Assert.assertSame(mockRequest, request);
                return theURI;
            }
        };

        final String result = extractor.generateKey(defaultHost, mockRequest, mockEntry);

        verify(mockEntry).hasVariants();
        Assert.assertSame(theURI, result);
    }

    @Test
    public void testGetVariantURIWithSingleValueVaryHeaderPrepends() {
        final String theURI = "theURI";
        final Header[] varyHeaders = { new BasicHeader("Vary", "Accept-Encoding") };
        final Header[] encHeaders = { new BasicHeader("Accept-Encoding", "gzip") };
        extractor = new CacheKeyGenerator() {
            @Override
            public String generateKey(final HttpHost h, final HttpRequest request) {
                Assert.assertSame(defaultHost, h);
                Assert.assertSame(mockRequest, request);
                return theURI;
            }
        };
        when(mockEntry.hasVariants()).thenReturn(true);
        when(mockEntry.headerIterator("Vary")).thenReturn(new BasicHeaderIterator(varyHeaders, "Vary"));
        when(mockRequest.getHeaders("Accept-Encoding")).thenReturn(encHeaders);

        final String result = extractor.generateKey(defaultHost, mockRequest, mockEntry);

        verify(mockEntry).hasVariants();
        verify(mockEntry).headerIterator("Vary");
        verify(mockRequest).getHeaders("Accept-Encoding");
        Assert.assertEquals("{Accept-Encoding=gzip}" + theURI, result);
    }

    // A varied-on header absent from the request still appears in the prefix,
    // with an empty value.
    @Test
    public void testGetVariantURIWithMissingRequestHeader() {
        final String theURI = "theURI";
        final Header[] noHeaders = new Header[0];
        final Header[] varyHeaders = { new BasicHeader("Vary", "Accept-Encoding") };
        extractor = new CacheKeyGenerator() {
            @Override
            public String generateKey(final HttpHost h, final HttpRequest request) {
                Assert.assertSame(defaultHost, h);
                Assert.assertSame(mockRequest, request);
                return theURI;
            }
        };
        when(mockEntry.hasVariants()).thenReturn(true);
        when(mockEntry.headerIterator("Vary")).thenReturn(new BasicHeaderIterator(varyHeaders, "Vary"));
        when(mockRequest.getHeaders("Accept-Encoding"))
                .thenReturn(noHeaders);

        final String result = extractor.generateKey(defaultHost, mockRequest, mockEntry);

        verify(mockEntry).hasVariants();
        verify(mockEntry).headerIterator("Vary");
        verify(mockRequest).getHeaders("Accept-Encoding");
        Assert.assertEquals("{Accept-Encoding=}" + theURI, result);
    }

    // Header names in the variant prefix are sorted alphabetically so the key
    // does not depend on the order they were listed in Vary.
    @Test
    public void testGetVariantURIAlphabetizesWithMultipleVaryingHeaders() {
        final String theURI = "theURI";
        final Header[] varyHeaders = { new BasicHeader("Vary", "User-Agent, Accept-Encoding") };
        final Header[] encHeaders = { new BasicHeader("Accept-Encoding", "gzip") };
        final Header[] uaHeaders = { new BasicHeader("User-Agent", "browser") };
        extractor = new CacheKeyGenerator() {
            @Override
            public String generateKey(final HttpHost h, final HttpRequest request) {
                Assert.assertSame(defaultHost, h);
                Assert.assertSame(mockRequest, request);
                return theURI;
            }
        };
        when(mockEntry.hasVariants()).thenReturn(true);
        when(mockEntry.headerIterator("Vary")).thenReturn(new BasicHeaderIterator(varyHeaders, "Vary"));
        when(mockRequest.getHeaders("Accept-Encoding")).thenReturn(encHeaders);
        when(mockRequest.getHeaders("User-Agent")).thenReturn(uaHeaders);

        final String result = extractor.generateKey(defaultHost, mockRequest, mockEntry);

        verify(mockEntry).hasVariants();
        verify(mockEntry).headerIterator("Vary");
        verify(mockRequest).getHeaders("Accept-Encoding");
        verify(mockRequest).getHeaders("User-Agent");
        Assert.assertEquals("{Accept-Encoding=gzip&User-Agent=browser}" + theURI, result);
    }

    // Multiple Vary header lines are merged the same way as one combined line.
    @Test
    public void testGetVariantURIHandlesMultipleVaryHeaders() {
        final String theURI = "theURI";
        final Header[] varyHeaders = { new BasicHeader("Vary", "User-Agent"),
                new BasicHeader("Vary", "Accept-Encoding") };
        final Header[] encHeaders = { new BasicHeader("Accept-Encoding", "gzip") };
        final Header[] uaHeaders = { new BasicHeader("User-Agent", "browser") };
        extractor = new CacheKeyGenerator() {
            @Override
            public String generateKey(final HttpHost h, final HttpRequest request) {
                Assert.assertSame(defaultHost, h);
                Assert.assertSame(mockRequest, request);
                return theURI;
            }
        };
        when(mockEntry.hasVariants()).thenReturn(true);
        when(mockEntry.headerIterator("Vary")).thenReturn(new BasicHeaderIterator(varyHeaders, "Vary"));
        when(mockRequest.getHeaders("Accept-Encoding")).thenReturn(encHeaders);
        when(mockRequest.getHeaders("User-Agent")).thenReturn(uaHeaders);

        final String result = extractor.generateKey(defaultHost, mockRequest, mockEntry);

        verify(mockEntry).hasVariants();
        verify(mockEntry).headerIterator("Vary");
        verify(mockRequest).getHeaders("Accept-Encoding");
        verify(mockRequest).getHeaders("User-Agent");
        Assert.assertEquals("{Accept-Encoding=gzip&User-Agent=browser}" + theURI, result);
    }

    // Multiple request-header lines for one varied-on name are joined and
    // percent-encoded (", " becomes "%2C+") in the variant prefix.
    @Test
    public void testGetVariantURIHandlesMultipleLineRequestHeaders() {
        final String theURI = "theURI";
        final Header[] varyHeaders = { new BasicHeader("Vary", "User-Agent, Accept-Encoding") };
        final Header[] encHeaders = { new BasicHeader("Accept-Encoding", "gzip"),
                new BasicHeader("Accept-Encoding", "deflate") };
        final Header[] uaHeaders = { new BasicHeader("User-Agent", "browser") };
        extractor = new CacheKeyGenerator() {
            @Override
            public String generateKey(final HttpHost h, final HttpRequest request) {
                Assert.assertSame(defaultHost, h);
                Assert.assertSame(mockRequest, request);
                return theURI;
            }
        };
        when(mockEntry.hasVariants()).thenReturn(true);
        when(mockEntry.headerIterator("Vary")).thenReturn(new BasicHeaderIterator(varyHeaders, "Vary"));
        when(mockRequest.getHeaders("Accept-Encoding")).thenReturn(encHeaders);
        when(mockRequest.getHeaders("User-Agent")).thenReturn(uaHeaders);

        final String result = extractor.generateKey(defaultHost, mockRequest, mockEntry);

        verify(mockEntry).hasVariants();
        verify(mockEntry).headerIterator("Vary");
        verify(mockRequest).getHeaders("Accept-Encoding");
        verify(mockRequest).getHeaders("User-Agent");
        Assert.assertEquals("{Accept-Encoding=gzip%2C+deflate&User-Agent=browser}" + theURI, result);
    }

    /*
     * "When comparing two URIs to decide if they match or not, a client
     * SHOULD use a case-sensitive octet-by-octet comparison of the entire
     * URIs, with these exceptions:
     * - A port that is empty or not given is equivalent to the default
     * port for that URI-reference;
     * - Comparisons of host names MUST be case-insensitive;
     * - Comparisons of scheme names MUST be case-insensitive;
     * - An empty abs_path is equivalent to an abs_path of "/".
     * Characters other than those in the 'reserved' and 'unsafe' sets
     * (see RFC 2396 [42]) are equivalent to their '"%" HEX HEX' encoding."
     *
     * http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.2.3
     */
    @Test
    public void testEmptyPortEquivalentToDefaultPortForHttp() {
        final HttpHost host1 = new HttpHost("foo.example.com:");
        final HttpHost host2 = new HttpHost("foo.example.com:80");
        final HttpRequest req = new BasicHttpRequest("GET", "/");
        Assert.assertEquals(extractor.generateKey(host1, req), extractor.generateKey(host2, req));
    }

    @Test
    public void testEmptyPortEquivalentToDefaultPortForHttps() {
        final HttpHost host1 = new HttpHost("foo.example.com", -1, "https");
        final HttpHost host2 = new HttpHost("foo.example.com", 443, "https");
        final HttpRequest req = new BasicHttpRequest("GET", "/");
        final String uri1 = extractor.generateKey(host1, req);
        final String uri2 = extractor.generateKey(host2, req);
        Assert.assertEquals(uri1, uri2);
    }

    @Test
    public void testEmptyPortEquivalentToDefaultPortForHttpsAbsoluteURI() {
        final HttpHost host = new HttpHost("foo.example.com", -1, "https");
        final HttpGet get1 = new HttpGet("https://bar.example.com:/");
        final HttpGet get2 = new HttpGet("https://bar.example.com:443/");
        final String uri1 = extractor.generateKey(host, get1);
        final String uri2 = extractor.generateKey(host, get2);
        Assert.assertEquals(uri1, uri2);
    }

    @Test
    public void testNotProvidedPortEquivalentToDefaultPortForHttpsAbsoluteURI() {
        final HttpHost host = new HttpHost("foo.example.com", -1, "https");
        final HttpGet get1 = new HttpGet("https://bar.example.com/");
        final HttpGet get2 = new HttpGet("https://bar.example.com:443/");
        final String uri1 = extractor.generateKey(host, get1);
        final String uri2 = extractor.generateKey(host, get2);
        Assert.assertEquals(uri1, uri2);
    }

    @Test
    public void testNotProvidedPortEquivalentToDefaultPortForHttp() {
        final HttpHost host1 = new HttpHost("foo.example.com");
        final HttpHost host2 = new HttpHost("foo.example.com:80");
        final HttpRequest
req = new BasicHttpRequest("GET", "/"); Assert.assertEquals(extractor.generateKey(host1, req), extractor.generateKey(host2, req)); } @Test public void testHostNameComparisonsAreCaseInsensitive() { final HttpHost host1 = new HttpHost("foo.example.com"); final HttpHost host2 = new HttpHost("FOO.EXAMPLE.COM"); final HttpRequest req = new BasicHttpRequest("GET", "/"); Assert.assertEquals(extractor.generateKey(host1, req), extractor.generateKey(host2, req)); } @Test public void testSchemeNameComparisonsAreCaseInsensitive() { final HttpHost host1 = new HttpHost("foo.example.com", -1, "http"); final HttpHost host2 = new HttpHost("foo.example.com", -1, "HTTP"); final HttpRequest req = new BasicHttpRequest("GET", "/"); Assert.assertEquals(extractor.generateKey(host1, req), extractor.generateKey(host2, req)); } @Test public void testEmptyAbsPathIsEquivalentToSlash() { final HttpHost host = new HttpHost("foo.example.com"); final HttpRequest req1 = new BasicHttpRequest("GET", "/"); final HttpRequest req2 = new HttpGet("http://foo.example.com"); Assert.assertEquals(extractor.generateKey(host, req1), extractor.generateKey(host, req2)); } @Test public void testExtraDotSegmentsAreIgnored() { final HttpHost host = new HttpHost("foo.example.com"); final HttpRequest req1 = new BasicHttpRequest("GET", "/"); final HttpRequest req2 = new HttpGet("http://foo.example.com/./"); Assert.assertEquals(extractor.generateKey(host, req1), extractor.generateKey(host, req2)); } @Test public void testExtraDotDotSegmentsAreIgnored() { final HttpHost host = new HttpHost("foo.example.com"); final HttpRequest req1 = new BasicHttpRequest("GET", "/"); final HttpRequest req2 = new HttpGet("http://foo.example.com/.././../"); Assert.assertEquals(extractor.generateKey(host, req1), extractor.generateKey(host, req2)); } @Test public void testIntermidateDotDotSegementsAreEquivalent() { final HttpHost host = new HttpHost("foo.example.com"); final HttpRequest req1 = new BasicHttpRequest("GET", "/home.html"); final 
HttpRequest req2 = new BasicHttpRequest("GET", "/%7Esmith/../home.html"); Assert.assertEquals(extractor.generateKey(host, req1), extractor.generateKey(host, req2)); } @Test public void testIntermidateEncodedDotDotSegementsAreEquivalent() { final HttpHost host = new HttpHost("foo.example.com"); final HttpRequest req1 = new BasicHttpRequest("GET", "/home.html"); final HttpRequest req2 = new BasicHttpRequest("GET", "/%7Esmith%2F../home.html"); Assert.assertEquals(extractor.generateKey(host, req1), extractor.generateKey(host, req2)); } @Test public void testIntermidateDotSegementsAreEquivalent() { final HttpHost host = new HttpHost("foo.example.com"); final HttpRequest req1 = new BasicHttpRequest("GET", "/~smith/home.html"); final HttpRequest req2 = new BasicHttpRequest("GET", "/%7Esmith/./home.html"); Assert.assertEquals(extractor.generateKey(host, req1), extractor.generateKey(host, req2)); } @Test public void testEquivalentPathEncodingsAreEquivalent() { final HttpHost host = new HttpHost("foo.example.com"); final HttpRequest req1 = new BasicHttpRequest("GET", "/~smith/home.html"); final HttpRequest req2 = new BasicHttpRequest("GET", "/%7Esmith/home.html"); Assert.assertEquals(extractor.generateKey(host, req1), extractor.generateKey(host, req2)); } @Test public void testEquivalentExtraPathEncodingsAreEquivalent() { final HttpHost host = new HttpHost("foo.example.com"); final HttpRequest req1 = new BasicHttpRequest("GET", "/~smith/home.html"); final HttpRequest req2 = new BasicHttpRequest("GET", "/%7Esmith%2Fhome.html"); Assert.assertEquals(extractor.generateKey(host, req1), extractor.generateKey(host, req2)); } @Test public void testEquivalentExtraPathEncodingsWithPercentAreEquivalent() { final HttpHost host = new HttpHost("foo.example.com"); final HttpRequest req1 = new BasicHttpRequest("GET", "/~smith/home%20folder.html"); final HttpRequest req2 = new BasicHttpRequest("GET", "/%7Esmith%2Fhome%20folder.html"); Assert.assertEquals(extractor.generateKey(host, req1), 
extractor.generateKey(host, req2)); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.io.input;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.io.IOException;
import java.io.StringReader;
import java.io.UncheckedIOException;
import java.nio.CharBuffer;

import org.apache.commons.io.IOUtils;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

/**
 * Tests {@link UncheckedBufferedReader}.
*/ public class UncheckedBufferedReaderTest { private UncheckedBufferedReader ucStringReader; private UncheckedBufferedReader ucBrokenReader; private IOException exception = new IOException("test exception"); @SuppressWarnings("resource") @BeforeEach public void beforeEach() { ucStringReader = UncheckedBufferedReader.on(new StringReader("01")); exception = new IOException("test exception"); ucBrokenReader = UncheckedBufferedReader.on(new BrokenReader(exception)); } @Test public void testBufferSize() { try (UncheckedBufferedReader uncheckedReader = new UncheckedBufferedReader(new StringReader("0123456789"), 2)) { assertEquals('0', uncheckedReader.read()); } } @Test public void testClose() { ucStringReader.close(); assertThrows(UncheckedIOException.class, () -> ucBrokenReader.read()); } @Test public void testCloseThrows() { assertEquals(exception, assertThrows(UncheckedIOException.class, () -> ucBrokenReader.close()).getCause()); } @Test public void testMarkReset() { ucStringReader.mark(10); final int c = ucStringReader.read(); ucStringReader.reset(); assertEquals(c, ucStringReader.read()); } @Test public void testMarkThrows() { try (UncheckedBufferedReader closedReader = UncheckedBufferedReader.on(ClosedReader.INSTANCE)) { closedReader.close(); assertThrows(UncheckedIOException.class, () -> closedReader.mark(1)); } } @Test public void testRead() { try (final UncheckedBufferedReader uncheckedReader = UncheckedBufferedReader.on(ucStringReader)) { assertEquals('0', uncheckedReader.read()); assertEquals('1', uncheckedReader.read()); assertEquals(IOUtils.EOF, uncheckedReader.read()); assertEquals(IOUtils.EOF, uncheckedReader.read()); } } @Test public void testReadCharArray() { try (final UncheckedBufferedReader uncheckedReader = UncheckedBufferedReader.on(ucStringReader)) { final char[] array = new char[1]; assertEquals(1, uncheckedReader.read(array)); assertEquals('0', array[0]); array[0] = 0; assertEquals(1, uncheckedReader.read(array)); assertEquals('1', array[0]); 
array[0] = 0; assertEquals(IOUtils.EOF, uncheckedReader.read(array)); assertEquals(0, array[0]); assertEquals(IOUtils.EOF, uncheckedReader.read(array)); assertEquals(0, array[0]); } } @Test public void testReadCharArrayIndexed() { try (final UncheckedBufferedReader uncheckedReader = UncheckedBufferedReader.on(ucStringReader)) { final char[] array = new char[1]; assertEquals(1, uncheckedReader.read(array, 0, 1)); assertEquals('0', array[0]); array[0] = 0; assertEquals(1, uncheckedReader.read(array, 0, 1)); assertEquals('1', array[0]); array[0] = 0; assertEquals(IOUtils.EOF, uncheckedReader.read(array, 0, 1)); assertEquals(0, array[0]); assertEquals(IOUtils.EOF, uncheckedReader.read(array, 0, 1)); assertEquals(0, array[0]); } } @Test public void testReadCharArrayIndexedThrows() { assertEquals(exception, assertThrows(UncheckedIOException.class, () -> ucBrokenReader.read(new char[1], 0, 1)).getCause()); } @Test public void testReadCharArrayThrows() { assertEquals(exception, assertThrows(UncheckedIOException.class, () -> ucBrokenReader.read(new char[1])).getCause()); } @Test public void testReadCharBuffer() { try (final UncheckedBufferedReader uncheckedReader = UncheckedBufferedReader.on(ucStringReader)) { final CharBuffer buffer = CharBuffer.wrap(new char[1]); assertEquals(1, uncheckedReader.read(buffer)); buffer.flip(); assertEquals('0', buffer.charAt(0)); buffer.put(0, (char) 0); assertEquals(1, uncheckedReader.read(buffer)); buffer.flip(); assertEquals('1', buffer.charAt(0)); buffer.put(0, (char) 0); assertEquals(IOUtils.EOF, uncheckedReader.read(buffer)); buffer.flip(); assertEquals(0, buffer.length()); assertEquals(0, uncheckedReader.read(buffer)); buffer.flip(); assertEquals(0, buffer.length()); } } @Test public void testReadCharBufferThrows() { assertEquals(exception, assertThrows(UncheckedIOException.class, () -> ucBrokenReader.read(CharBuffer.wrap(new char[1]))).getCause()); } @Test public void testReadLine() { try (final UncheckedBufferedReader 
uncheckedReader = UncheckedBufferedReader.on(ucStringReader)) { assertEquals("01", uncheckedReader.readLine()); assertEquals(IOUtils.EOF, uncheckedReader.read()); assertEquals(IOUtils.EOF, uncheckedReader.read()); } } @Test public void testReadLineThrows() { assertEquals(exception, assertThrows(UncheckedIOException.class, () -> ucBrokenReader.readLine()).getCause()); } @Test public void testReadThrows() { assertEquals(exception, assertThrows(UncheckedIOException.class, () -> ucBrokenReader.read()).getCause()); } @Test public void testReady() { assertTrue(ucStringReader.ready()); } @Test public void testReadyThrows() { assertEquals(exception, assertThrows(UncheckedIOException.class, () -> ucBrokenReader.ready()).getCause()); } @Test public void testResetThrows() { try (UncheckedBufferedReader closedReader = UncheckedBufferedReader.on(ClosedReader.INSTANCE)) { closedReader.close(); assertThrows(UncheckedIOException.class, () -> ucBrokenReader.reset()); } } @Test public void testSkip() { assertEquals(1, ucStringReader.skip(1)); } @Test public void testSkipThrows() { assertEquals(exception, assertThrows(UncheckedIOException.class, () -> ucBrokenReader.skip(1)).getCause()); } }
package net.sf.jabref.gui; import java.awt.Component; import java.io.File; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.EnumSet; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; import javax.swing.JFileChooser; import javax.swing.JOptionPane; import javax.swing.filechooser.FileFilter; import javax.swing.filechooser.FileNameExtensionFilter; import net.sf.jabref.Globals; import net.sf.jabref.logic.l10n.Localization; import net.sf.jabref.logic.util.FileExtensions; import net.sf.jabref.preferences.JabRefPreferences; public class FileDialog { /** * Custom confirmation dialog * http://stackoverflow.com/a/3729157 */ private final JFileChooser fileChooser = new JFileChooser() { @Override public void approveSelection() { File file = getSelectedFile(); if (file.exists() && (getDialogType() == SAVE_DIALOG)) { int result = JOptionPane.showConfirmDialog(this, Localization.lang("'%0' exists. 
Overwrite file?", file.getName()), Localization.lang("Existing file"), JOptionPane.YES_NO_CANCEL_OPTION); switch (result) { case JOptionPane.YES_OPTION: super.approveSelection(); return; case JOptionPane.NO_OPTION: return; case JOptionPane.CLOSED_OPTION: return; case JOptionPane.CANCEL_OPTION: cancelSelection(); return; default: return; } } super.approveSelection(); } }; private final Component parent; private final String directory; private Collection<FileExtensions> extensions = EnumSet.noneOf(FileExtensions.class); /** * Creates a new filedialog showing the current working dir {@link JabRefPreferences#WORKING_DIRECTORY} * @param parent The parent frame associated with this dialog */ public FileDialog(Component parent) { this(parent, getWorkingDir()); } /** * Creates a new dialog in the given directory * @param parent The parent frame associated with this dialog * @param dir The starting directory to show in the dialog */ public FileDialog(Component parent, String dir) { Objects.requireNonNull(dir, "Directory must not be null"); this.parent = parent; this.directory = dir; fileChooser.setCurrentDirectory(Paths.get(dir).toFile()); } /** * Add a single extension as file filter * @param singleExt The extension * @return FileDialog */ public FileDialog withExtension(FileExtensions singleExt) { withExtensions(EnumSet.of(singleExt)); return this; } /** * Add a multiple extensions as file filter * @param fileExtensions The extensions * @return FileDialog */ public FileDialog withExtensions(Collection<FileExtensions> fileExtensions) { this.extensions = fileExtensions; for (FileExtensions ext : fileExtensions) { FileNameExtensionFilter extFilter = new FileNameExtensionFilter(ext.getDescription(), ext.getExtensions()); fileChooser.addChoosableFileFilter(extFilter); } return this; } /** * Sets the default file filter extension for the file dialog. * If the desired extension is not found nothing is changed. 
* * @param extension the file extension */ public void setDefaultExtension(FileExtensions extension) { Arrays.stream(fileChooser.getChoosableFileFilters()) .filter(f -> Objects.equals(f.getDescription(), extension.getDescription())) .findFirst() .ifPresent(fileChooser::setFileFilter); } /** * Returns the currently selected file filter. * * @return FileFilter */ public FileFilter getFileFilter() { return fileChooser.getFileFilter(); } /** * Sets a custom file filter. * Only use when withExtension() does not suffice. * * @param filter the custom file filter */ public void setFileFilter(FileFilter filter) { fileChooser.setFileFilter(filter); } /** * Updates the working directory preference * @return FileDialog */ public FileDialog updateWorkingDirPref() { Globals.prefs.put(JabRefPreferences.WORKING_DIRECTORY, this.directory); return this; } /** * Shows an {@link JFileChooser#OPEN_DIALOG} and allows to select a single folder * @return The path of the selected folder or {@link Optional#empty()} if dialog is aborted */ public Optional<Path> showDialogAndGetSelectedDirectory() { fileChooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); fileChooser.setDialogTitle(Localization.lang("Select directory")); fileChooser.setApproveButtonText(Localization.lang("Select")); fileChooser.setApproveButtonToolTipText(Localization.lang("Select directory")); return showDialogAndGetSelectedFile(); } /** * Shows an {@link JFileChooser#OPEN_DIALOG} and allows to select multiple files * @return List containing the paths of all files or an empty list if dialog is canceled */ public List<String> showDialogAndGetMultipleFiles() { fileChooser.setDialogType(JFileChooser.OPEN_DIALOG); fileChooser.setMultiSelectionEnabled(true); if (showDialogAndIsAccepted()) { List<String> files = Arrays.stream(fileChooser.getSelectedFiles()).map(File::toString) .collect(Collectors.toList()); return files; } return Collections.emptyList(); } /** * Shows an {@link JFileChooser#OPEN_DIALOG} and allows to 
select a single file/folder * @return The path of the selected file/folder or {@link Optional#empty()} if dialog is aborted */ public Optional<Path> showDialogAndGetSelectedFile() { fileChooser.setDialogType(JFileChooser.OPEN_DIALOG); if (showDialogAndIsAccepted()) { return Optional.of(fileChooser.getSelectedFile().toPath()); } return Optional.empty(); } /** * Shows an {@link JFileChooser#SAVE_DIALOG} and allows to save a new file <br> * If an extension is provided, adds the extension to the file <br> * Selecting an existing file will show an overwrite dialog * @return The path of the new file, or {@link Optional#empty()} if dialog is aborted */ public Optional<Path> saveNewFile() { fileChooser.setDialogType(JFileChooser.SAVE_DIALOG); if (showDialogAndIsAccepted()) { File file = fileChooser.getSelectedFile(); if (!extensions.isEmpty() && !fileChooser.accept(file)) { return Optional.of(Paths.get(file.getPath() + extensions.iterator().next().getFirstExtensionWithDot())); } return Optional.of(file.toPath()); } return Optional.empty(); } private boolean showDialogAndIsAccepted() { return fileChooser.showDialog(parent, null) == JFileChooser.APPROVE_OPTION; } private static String getWorkingDir() { return Globals.prefs.get(JabRefPreferences.WORKING_DIRECTORY); } }
package com.tommytony.war.volume; import org.bukkit.World; import org.bukkit.block.Block; import com.tommytony.war.Team; import com.tommytony.war.War; import com.tommytony.war.Warzone; import com.tommytony.war.mapper.ZoneVolumeMapper; import com.tommytony.war.structure.Monument; /** * * @author tommytony * */ public class ZoneVolume extends Volume { private Warzone zone; private boolean isSaved = false; public ZoneVolume(String name, World world, Warzone zone) { super(name, world); this.zone = zone; } @Override public int saveBlocks() { // Save blocks directly to disk (i.e. don't put everything in memory) int saved = ZoneVolumeMapper.save(this, this.zone.getName()); War.war.log("Saved " + saved + " blocks in warzone " + this.zone.getName() + ".", java.util.logging.Level.INFO); this.isSaved = true; return saved; } @Override public boolean isSaved() { return this.isSaved; } public void loadCorners() { ZoneVolumeMapper.load(this, this.zone.getName(), this.getWorld(), true); this.isSaved = true; } @Override public int resetBlocks() { // Load blocks directly from disk and onto the map (i.e. 
no more in-memory warzone blocks) int reset = ZoneVolumeMapper.load(this, this.zone.getName(), this.getWorld(), false); War.war.log("Reset " + reset + " blocks in warzone " + this.zone.getName() + ".", java.util.logging.Level.INFO); this.isSaved = true; return reset; } @Override public void setBlockTypes(int[][][] blockTypes) { return; } @Override public void setBlockDatas(byte[][][] blockData) { return; } public void setNorthwest(Block block) throws NotNorthwestException, TooSmallException, TooBigException { // northwest defaults to top block BlockInfo topBlock = new BlockInfo(block.getX(), 127, block.getZ(), block.getTypeId(), block.getData()); BlockInfo oldCornerOne = this.getCornerOne(); BlockInfo oldCornerTwo = this.getCornerTwo(); if (this.getCornerOne() == null) { if (this.getCornerTwo() == null) { // northwest defaults to corner 1 super.setCornerOne(topBlock); } else if (this.getCornerTwo().getX() <= block.getX() || this.getCornerTwo().getZ() >= block.getZ()) { throw new NotNorthwestException(); } else { // corner 2 already set, but we're sure we're located at the northwest of it super.setCornerOne(topBlock); } } else if (this.getCornerTwo() == null) { // corner 1 already exists, set northwest as corner 2 (only if it's at the northwest of corner 1) if (this.getCornerOne().getX() <= block.getX() || this.getCornerOne().getZ() >= block.getZ()) { throw new NotNorthwestException(); } super.setCornerTwo(topBlock); } else { // both corners already set: we are resizing (only if the new block is northwest relative to the southeasternmost block) if (this.getSoutheastX() <= block.getX() || this.getSoutheastZ() >= block.getZ()) { throw new NotNorthwestException(); } BlockInfo minXBlock = this.getMinXBlock(); // north means min X minXBlock.setX(block.getX()); // mutating, argh! 
BlockInfo maxZBlock = this.getMaxZBlock(); // west means max Z maxZBlock.setZ(block.getZ()); } if (this.tooSmall() || this.zoneStructuresAreOutside()) { super.setCornerOne(oldCornerOne); super.setCornerTwo(oldCornerTwo); throw new TooSmallException(); } else if (this.tooBig()) { super.setCornerOne(oldCornerOne); super.setCornerTwo(oldCornerTwo); throw new TooBigException(); } } public int getNorthwestX() { if (!this.hasTwoCorners()) { return 0; } else { return this.getMinX(); } } public int getNorthwestZ() { if (!this.hasTwoCorners()) { return 0; } else { return this.getMaxZ(); } } public void setSoutheast(Block block) throws NotSoutheastException, TooSmallException, TooBigException { // southeast defaults to bottom block BlockInfo bottomBlock = new BlockInfo(block.getX(), 0, block.getZ(), block.getTypeId(), block.getData()); BlockInfo oldCornerOne = this.getCornerOne(); BlockInfo oldCornerTwo = this.getCornerTwo(); if (this.getCornerTwo() == null) { if (this.getCornerOne() == null) { // southeast defaults to corner 2 super.setCornerTwo(bottomBlock); } else if (this.getCornerOne().getX() >= block.getX() || this.getCornerOne().getZ() <= block.getZ()) { throw new NotSoutheastException(); } else { // corner 1 already set, but we're sure we're located at the southeast of it super.setCornerTwo(bottomBlock); } } else if (this.getCornerOne() == null) { // corner 2 already exists, set northwest as corner 1 (only if it's at the southeast of corner 2) if (this.getCornerTwo().getX() >= block.getX() || this.getCornerTwo().getZ() <= block.getZ()) { throw new NotSoutheastException(); } super.setCornerOne(bottomBlock); } else { // both corners already set: we are resizing (only if the new block is southeast relative to the northwesternmost block) if (this.getNorthwestX() >= block.getX() || this.getNorthwestZ() <= block.getZ()) { throw new NotSoutheastException(); } BlockInfo maxXBlock = this.getMaxXBlock(); // south means max X maxXBlock.setX(block.getX()); // mutating, argh! 
BlockInfo minZBlock = this.getMinZBlock(); // east means min Z minZBlock.setZ(block.getZ()); } if (this.tooSmall() || this.zoneStructuresAreOutside()) { super.setCornerOne(oldCornerOne); super.setCornerTwo(oldCornerTwo); throw new TooSmallException(); } else if (this.tooBig()) { super.setCornerOne(oldCornerOne); super.setCornerTwo(oldCornerTwo); throw new TooBigException(); } } public int getSoutheastX() { if (!this.hasTwoCorners()) { return 0; } else { return this.getMaxX(); } } public int getSoutheastZ() { if (!this.hasTwoCorners()) { return 0; } else { return (War.legacyBlockFace ? this.getMinZ() : this.getMaxZ()); } } public int getCenterY() { if (!this.hasTwoCorners()) { return 0; } else { return this.getMinY() + (this.getMaxY() - this.getMinY()) / 2; } } public void setZoneCornerOne(Block block) throws TooSmallException, TooBigException { BlockInfo oldCornerOne = this.getCornerOne(); super.setCornerOne(block); if (this.tooSmall() || this.zoneStructuresAreOutside()) { super.setCornerOne(oldCornerOne); throw new TooSmallException(); } else if (this.tooBig()) { super.setCornerOne(oldCornerOne); throw new TooBigException(); } } public void setZoneCornerTwo(Block block) throws TooSmallException, TooBigException { BlockInfo oldCornerTwo = this.getCornerTwo(); super.setCornerTwo(block); if (this.tooSmall() || this.zoneStructuresAreOutside()) { super.setCornerTwo(oldCornerTwo); throw new TooSmallException(); } else if (this.tooBig()) { super.setCornerTwo(oldCornerTwo); throw new TooBigException(); } } public boolean tooSmall() { if (this.hasTwoCorners() && ((this.getMaxX() - this.getMinX() < 10) || (this.getMaxY() - this.getMinY() < 10) || (this.getMaxZ() - this.getMinZ() < 10))) { return true; } return false; } public boolean tooBig() { if (this.hasTwoCorners() && ((this.getMaxX() - this.getMinX() > 750) || (this.getMaxY() - this.getMinY() > 750) || (this.getMaxZ() - this.getMinZ() > 750))) { return true; } return false; } public boolean zoneStructuresAreOutside() { 
// check team spawns & flags for (Team team : this.zone.getTeams()) { if (team.getTeamSpawn() != null) { if (!this.isInside(team.getSpawnVolume().getCornerOne()) || !this.isInside(team.getSpawnVolume().getCornerTwo())) { return true; } } if (team.getTeamFlag() != null) { if (!this.isInside(team.getFlagVolume().getCornerOne()) || !this.isInside(team.getFlagVolume().getCornerTwo())) { return true; } } } // check monuments for (Monument monument : this.zone.getMonuments()) { if (monument.getVolume() != null) { if (!this.isInside(monument.getVolume().getCornerOne()) || !this.isInside(monument.getVolume().getCornerTwo())) { return true; } } } return false; } private boolean isInside(BlockInfo info) { if (info.getX() <= this.getMaxX() && info.getX() >= this.getMinX() && info.getY() <= this.getMaxY() && info.getY() >= this.getMinY() && info.getZ() <= this.getMaxZ() && info.getZ() >= this.getMinZ()) { return true; } return false; } //TODO: OPTIMIZE THIS METHOD, THIS IS SLOW AND IMPRACTICAL public boolean isWallBlock(Block block) { return this.isEastWallBlock(block) || this.isNorthWallBlock(block) || this.isSouthWallBlock(block) || this.isWestWallBlock(block) || this.isUpWallBlock(block) || this.isDownWallBlock(block); } public boolean isEastWallBlock(Block block) { if(War.legacyBlockFace) { if (this.getMinZ()== block.getZ() && block.getX() <= this.getMaxX() && block.getX() >= this.getMinX() && block.getY() >= this.getMinY() && block.getY() <= this.getMaxY()) { return true; // east wall } } else { if(this.getMaxZ() >= block.getZ() && block.getZ() >= this.getMaxZ() && this.getMaxX() == block.getX() && block.getY() >= this.getMinY() && block.getY() <= this.getMaxY()) { return true; //east wall for new dirs } } return false; } public boolean isSouthWallBlock(Block block) { if(War.legacyBlockFace) { if (this.getMaxX() == block.getX() && block.getZ() <= this.getMaxZ() && block.getZ() >= this.getMinZ() && block.getY() >= this.getMinY() && block.getY() <= this.getMaxY()) { return 
true; // south wall } } else { if(this.getMaxZ() == block.getZ() && block.getX() <= this.getMaxX() && block.getX() >= this.getMinZ() && block.getY() >= this.getMinY() && block.getY() <= this.getMaxY()) { return true; //south wall for new dirs } } return false; } public boolean isNorthWallBlock(Block block) { if(War.legacyBlockFace) { if (this.getMinX() == block.getX() && block.getZ() <= this.getMaxZ() && block.getZ() >= this.getMinZ() && block.getY() >= this.getMinY() && block.getY() <= this.getMaxY()) { return true; // north wall } } else { if(this.getMinZ() == block.getZ() && block.getX() <= this.getMaxX() && block.getX() >= this.getMinX() && block.getY() >= this.getMinY() && block.getY() <= this.getMaxY()) { return true; //north wall for new dirs } } return false; } public boolean isWestWallBlock(Block block) { if(War.legacyBlockFace) { if (this.getMaxZ() == block.getZ() && block.getX() <= this.getMaxX() && block.getX() >= this.getMinX() && block.getY() >= this.getMinY() && block.getY() <= this.getMaxY()) { return true; // west wall } } else { if(this.getMinX() == block.getX() && block.getZ() <= this.getMaxZ() && block.getZ() >= this.getMinZ() && block.getY() >= this.getMinY() && block.getY() <= this.getMaxY()) { return true; //west wall for new dirs } } return false; } public boolean isUpWallBlock(Block block) { if (this.getMaxY() == block.getY() && block.getX() <= this.getMaxX() && block.getX() >= this.getMinX() && block.getZ() >= this.getMinZ() && block.getZ() <= this.getMaxZ()) { return true; // top wall } return false; } public boolean isDownWallBlock(Block block) { if (this.getMinY() == block.getY() && block.getX() <= this.getMaxX() && block.getX() >= this.getMinX() && block.getZ() >= this.getMinZ() && block.getZ() <= this.getMaxZ()) { return true; // bottom wall } return false; } }
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.dmn.feel.lang.ast; import java.math.BigDecimal; import java.math.MathContext; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.function.Supplier; import org.antlr.v4.runtime.ParserRuleContext; import org.kie.dmn.api.feel.runtime.events.FEELEvent.Severity; import org.kie.dmn.feel.lang.EvaluationContext; import org.kie.dmn.feel.lang.Type; import org.kie.dmn.feel.lang.types.BuiltInType; import org.kie.dmn.feel.util.Msg; public class ForExpressionNode extends BaseNode { private List<IterationContextNode> iterationContexts; private BaseNode expression; public ForExpressionNode(ParserRuleContext ctx, ListNode iterationContexts, BaseNode expression) { super( ctx ); this.iterationContexts = new ArrayList<>( ); this.expression = expression; for( BaseNode n : iterationContexts.getElements() ) { this.iterationContexts.add( (IterationContextNode) n ); } } public List<IterationContextNode> getIterationContexts() { return iterationContexts; } public void setIterationContexts(List<IterationContextNode> iterationContexts) { this.iterationContexts = iterationContexts; } public BaseNode getExpression() { return expression; } public void setExpression(BaseNode expression) { this.expression = expression; } @Override public Object evaluate(EvaluationContext ctx) { try { ctx.enterFrame(); List results = 
new ArrayList( ); ctx.setValue("partial", results); ForIteration[] ictx = initializeContexts( ctx, iterationContexts); while ( nextIteration( ctx, ictx ) ) { Object result = expression.evaluate( ctx ); results.add( result ); } return results; } catch (EndpointOfRangeNotOfNumberException e) { // ast error already reported return null; } finally { ctx.exitFrame(); } } private boolean nextIteration( EvaluationContext ctx, ForIteration[] ictx ) { int i = ictx.length-1; while ( i >= 0 && i < ictx.length ) { if ( ictx[i].hasNextValue() ) { setValueIntoContext( ctx, ictx[i] ); i++; } else { i--; } } return i >= 0; } private void setValueIntoContext(EvaluationContext ctx, ForIteration forIteration) { ctx.setValue( forIteration.getName(), forIteration.getNextValue() ); } @Override public Type getResultType() { return BuiltInType.LIST; } private ForIteration[] initializeContexts(EvaluationContext ctx, List<IterationContextNode> iterationContexts) { ForIteration[] ictx = new ForIteration[iterationContexts.size()]; int i = 0; for ( IterationContextNode icn : iterationContexts ) { ictx[i] = createQuantifiedExpressionIterationContext( ctx, icn ); if( i < iterationContexts.size()-1 && ictx[i].hasNextValue() ) { setValueIntoContext( ctx, ictx[i] ); } i++; } return ictx; } private static class EndpointOfRangeNotOfNumberException extends RuntimeException { private static final long serialVersionUID = 1L; } private ForIteration createQuantifiedExpressionIterationContext(EvaluationContext ctx, IterationContextNode icn) { ForIteration fi = null; String name = icn.evaluateName( ctx ); Object result = icn.evaluate( ctx ); Object rangeEnd = icn.evaluateRangeEnd(ctx); if (rangeEnd == null) { Iterable values = result instanceof Iterable ? 
(Iterable) result : Collections.singletonList(result); fi = new ForIteration(name, values); } else { valueMustBeANumber(ctx, result); BigDecimal start = (BigDecimal) result; valueMustBeANumber(ctx, rangeEnd); BigDecimal end = (BigDecimal) rangeEnd; fi = new ForIteration(name, start, end); } return fi; } private void valueMustBeANumber(EvaluationContext ctx, Object value) { if (!(value instanceof BigDecimal)) { ctx.notifyEvt(astEvent(Severity.ERROR, Msg.createMessage(Msg.VALUE_X_NOT_A_VALID_ENDPOINT_FOR_RANGE_BECAUSE_NOT_A_NUMBER, value), null)); throw new EndpointOfRangeNotOfNumberException(); } } private static class ForIteration { private String name; private Iterable values; private Supplier<Iterator> iteratorGenerator; private Iterator iterator; public ForIteration(String name, Iterable values) { this.name = name; this.values = values; this.iteratorGenerator = () -> this.values.iterator(); } public ForIteration(String name, final BigDecimal start, final BigDecimal end) { this.name = name; this.iteratorGenerator = () -> new BigDecimalRangeIterator(start, end); } public boolean hasNextValue() { if( iterator == null ) { iterator = iteratorGenerator.get(); } boolean hasValue = this.iterator.hasNext(); if( ! hasValue ) { this.iterator = null; } return hasValue; } public Object getNextValue() { return iterator != null ? iterator.next() : null; } public String getName() { return name; } } public static class BigDecimalRangeIterator implements Iterator<BigDecimal> { private enum Direction { ASCENDANT, DESCENDANT; } private final BigDecimal start; private final BigDecimal end; private BigDecimal cursor; private final Direction direction; private final BigDecimal increment; public BigDecimalRangeIterator(BigDecimal start, BigDecimal end) { this.start = start; this.end = end; this.direction = (start.compareTo(end) <= 0) ? Direction.ASCENDANT : Direction.DESCENDANT; this.increment = (direction == Direction.ASCENDANT) ? 
new BigDecimal(1, MathContext.DECIMAL128) : new BigDecimal(-1, MathContext.DECIMAL128); } @Override public boolean hasNext() { if (cursor == null) { return true; } else { BigDecimal lookAhead = cursor.add(increment); if (direction == Direction.ASCENDANT) { return lookAhead.compareTo(end) <= 0; } else { return lookAhead.compareTo(end) >= 0; } } } @Override public BigDecimal next() { if (cursor == null) { cursor = start; } else { cursor = cursor.add(increment); } return cursor; } } }
package de.oc.dbdoc.test; import java.io.StringWriter; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import de.oc.dbdoc.ant.Diagram; import de.oc.dbdoc.ant.Include; import de.oc.dbdoc.ant.OrcasDbDoc; import de.oc.dbdoc.ant.Styles; import de.oc.dbdoc.ant.Tablegroup; import de.oc.dbdoc.export.DotExport; import de.oc.dbdoc.export.DotExport.GraphAssociation; import de.oc.dbdoc.export.DotWriter; import de.oc.dbdoc.export.DotWriterImpl; import de.oc.dbdoc.graphdata.Graph; import de.oc.dbdoc.graphdata.GraphForDiagram; import de.oc.dbdoc.graphdata.GraphForSingleTable; import de.oc.dbdoc.graphdata.GraphForSingleTableAncestors; import de.oc.dbdoc.graphdata.GraphForSingleTableDescendants; import de.oc.dbdoc.schemadata.Association; import de.oc.dbdoc.schemadata.Column; import de.oc.dbdoc.schemadata.Schema; import de.oc.dbdoc.schemadata.Table; public class TestGraphPrinting extends BaseTableRegistrySetup { private static final String COLUMN_NAME_3 = "C3"; private static final String COLUMN_NAME_ID = "C_ID"; private static final String COLUMN_NAME_REF = "C_REF"; private enum Mode { ALL_ANCESTORS, NORMAL, ALL_DESCENDANTS } private Table _table1; private Table _table2; private Table _table3; private Table _table5; private Schema _schema; private Styles _styles; private DotWriterTestImpl _dotWriterTestImpl; private Association _association12; private Association _association23; private OrcasDbDoc _orcasDbDoc; private Association _association35; private Table createTable( String pTableName ) { Table lTable = new Table( pTableName ); lTable.addColumn( new Column( COLUMN_NAME_ID, "type" ) ); lTable.addColumn( new Column( COLUMN_NAME_REF, "type" ) ); lTable.addColumn( new Column( COLUMN_NAME_3, "type" ) ); _schema.addTable( lTable ); return lTable; } @Before public void setup() { _schema = new Schema(); 
_table1 = createTable( "T1" ); _table2 = createTable( "T2" ); _table3 = createTable( "T3" ); createTable( "T4" ); _table5 = createTable( "T5" ); _association12 = createAssociation( _table1, _table2 ); _association23 = createAssociation( _table2, _table3 ); _association35 = createAssociation( _table3, _table5 ); _orcasDbDoc = new OrcasDbDoc(); _styles = _orcasDbDoc.createStyles(); _styles.createTables(); _styles.createDiagrams(); _dotWriterTestImpl = new DotWriterTestImpl(); } private Association createAssociation( Table pTableFrom, Table pTableTo ) { Association lAssociation = new Association( pTableFrom.getName() + "_TO_" + pTableTo.getName(), pTableFrom, pTableTo, true, 0, Association.MULTIPLICITY_N, 1, 1 ); lAssociation.addColumnFrom( COLUMN_NAME_REF ); lAssociation.addColumnTo( COLUMN_NAME_ID ); _schema.addAssociation( lAssociation ); return lAssociation; } @Test public void testSingleTable1() { expectTableFull( _table1 ); expectTableRefonly( _table2, COLUMN_NAME_ID ); expectAssociation( _association12 ); runExporSingleTable( _table1, false ); } @Test public void testSingleTable1WithAllAncestors() { expectTableFull( _table1 ); expectTableRefonly( _table2, COLUMN_NAME_ID, COLUMN_NAME_REF ); expectTableRefonly( _table3, COLUMN_NAME_ID, COLUMN_NAME_REF ); expectTableRefonly( _table5, COLUMN_NAME_ID ); expectAssociation( _association12 ); expectAssociation( _association23 ); expectAssociation( _association35 ); runExporSingleTable( _table1, false, Mode.ALL_ANCESTORS ); } @Test public void testSingleTable2WithAllAncestors() { expectTableFull( _table2 ); expectTableRefonly( _table3, COLUMN_NAME_ID, COLUMN_NAME_REF ); expectTableRefonly( _table5, COLUMN_NAME_ID ); expectAssociation( _association23 ); expectAssociation( _association35 ); runExporSingleTable( _table2, false, Mode.ALL_ANCESTORS ); } @Test public void testSingleTable3WithAllDescendants() { expectTableFull( _table3 ); expectTableRefonly( _table2, COLUMN_NAME_ID, COLUMN_NAME_REF ); expectTableRefonly( 
_table1, COLUMN_NAME_REF ); expectAssociation( _association12 ); expectAssociation( _association23 ); runExporSingleTable( _table3, false, Mode.ALL_DESCENDANTS ); } @Test public void testSingleTable1OutrefOnly() { expectTableFull( _table1 ); expectTableRefonly( _table2, COLUMN_NAME_ID ); expectAssociation( _association12 ); runExporSingleTable( _table1, true ); } @Test public void testSingleTable2() { expectTableFull( _table2 ); expectTableRefonly( _table1, COLUMN_NAME_REF ); expectTableRefonly( _table3, COLUMN_NAME_ID ); expectAssociation( _association12 ); expectAssociation( _association23 ); runExporSingleTable( _table2, false ); } @Test public void testSingleTable2OutrefOnly() { expectTableFull( _table2 ); expectTableRefonly( _table3, COLUMN_NAME_ID ); expectAssociation( _association23 ); runExporSingleTable( _table2, true ); } @Test public void testGraphTable1Table3() { Diagram lDiagram = new Diagram(); lDiagram.setLabel( "D1" ); lDiagram.setTablegroup( createTableGroup( _table1, _table3 ) ); expectTableFull( _table1 ); expectTableFull( _table3 ); expectTableRefonly( _table2, COLUMN_NAME_ID, COLUMN_NAME_REF ); expectTableRefonly( _table5, COLUMN_NAME_ID ); expectAssociation( _association12 ); expectAssociation( _association23 ); expectAssociation( _association35 ); runExportForDiagram( lDiagram, false ); } @Test public void testGraphTable1Table3OutrefOnly() { Diagram lDiagram = new Diagram(); lDiagram.setLabel( "D1" ); lDiagram.setTablegroup( createTableGroup( _table1, _table3 ) ); expectTableFull( _table1 ); expectTableFull( _table3 ); expectTableRefonly( _table2, COLUMN_NAME_ID ); expectTableRefonly( _table5, COLUMN_NAME_ID ); expectAssociation( _association12 ); expectAssociation( _association35 ); runExportForDiagram( lDiagram, true ); } @Test public void testGraphTable1Table2() { Diagram lDiagram = new Diagram(); lDiagram.setLabel( "D1" ); lDiagram.setTablegroup( createTableGroup( _table1, _table2 ) ); expectTableFull( _table1 ); expectTableFull( _table2 
); expectTableRefonly( _table3, COLUMN_NAME_ID ); expectAssociation( _association12 ); expectAssociation( _association23 ); runExportForDiagram( lDiagram, false ); } private String createTableGroup( Table... pTables ) { Tablegroup lTablegroup = _orcasDbDoc.createTableregistry().createTablegroup(); String lName = "tg1"; lTablegroup.setName( lName ); for( Table lTable : pTables ) { Include lInclude = lTablegroup.createInclude(); lInclude.setName( lTable.getName() ); } return lName; } private void runExporSingleTable( Table pTable, boolean pOutRefsOnly, Mode pAllAncestors ) { GraphForSingleTable lGraphForSingleTable; switch( pAllAncestors ) { case ALL_ANCESTORS: lGraphForSingleTable = new GraphForSingleTableAncestors( pTable, new ArrayList<Graph>(), _styles, tableregistry ); break; case ALL_DESCENDANTS: lGraphForSingleTable = new GraphForSingleTableDescendants( pTable, new ArrayList<Graph>(), _styles, tableregistry ); break; case NORMAL: lGraphForSingleTable = new GraphForSingleTable( pTable, new ArrayList<Graph>(), _styles, tableregistry ); break; default: throw new RuntimeException(); } runExport( lGraphForSingleTable, pOutRefsOnly ); } private void runExporSingleTable( Table pTable, boolean pOutRefsOnly ) { runExporSingleTable( pTable, pOutRefsOnly, Mode.NORMAL ); } private void runExportForDiagram( Diagram pDiagram, boolean pOutRefsOnly ) { runExport( new GraphForDiagram( pDiagram, _styles, null, tableregistry ), pOutRefsOnly ); } private void runExport( Graph pGraph, boolean pOutRefsOnly ) { new DotExport().export( pGraph, _schema, _dotWriterTestImpl, pOutRefsOnly ); StringWriter lStringWriter = new StringWriter(); new DotExport().export( pGraph, _schema, new DotWriterImpl( lStringWriter ), pOutRefsOnly ); System.out.println( lStringWriter ); _dotWriterTestImpl.assertExpectedResults(); } public void expectTableRefonly( Table pTable, String... 
pColumns ) { _dotWriterTestImpl.expectTableRefonly( pTable, pColumns ); } public void expectTableFull( Table pTable ) { _dotWriterTestImpl.expectTableFull( pTable ); } public void expectAssociation( Association pAssociation ) { _dotWriterTestImpl.expectAssociation( pAssociation ); } private class DotWriterTestImpl implements DotWriter { private List<Table> _tablesFull = new ArrayList(); private List<Table> _tablesRefonly = new ArrayList(); private Map<Table,String[]> _tablesRefonlyColumns = new HashMap(); private List<Association> _associations = new ArrayList(); private List<Table> _expectedTablesFull = new ArrayList(); private List<Table> _expectedTablesRefonly = new ArrayList(); private Map<Table,String[]> _expectedTablesRefonlyColumns = new HashMap(); private List<Association> _expectedAssociations = new ArrayList(); public void expectTableRefonly( Table pTable, String... pColumns ) { _expectedTablesRefonly.add( pTable ); _expectedTablesRefonlyColumns.put( pTable, pColumns ); } public void expectTableFull( Table pTable ) { _expectedTablesFull.add( pTable ); } public void expectAssociation( Association pAssociation ) { _expectedAssociations.add( pAssociation ); } public void assertExpectedResults() { Assert.assertEquals( buildCompareString( _expectedTablesFull, _expectedTablesRefonly, _expectedTablesRefonlyColumns, _expectedAssociations ), buildCompareString( _tablesFull, _tablesRefonly, _tablesRefonlyColumns, _associations ) ); } private String buildCompareString( List<Table> pTablesFull, List<Table> pTablesRefonly, Map<Table,String[]> pTablesRefonlyColumns, List<Association> pAssociations ) { String lCompare = ""; lCompare += getTablesString( "full", pTablesFull, null ); lCompare += getTablesString( "refonly", pTablesRefonly, pTablesRefonlyColumns ); List<Table> lAllTables = new ArrayList( _schema.getTables() ); lAllTables.removeAll( pTablesFull ); lAllTables.removeAll( pTablesRefonly ); lCompare += getTablesString( "none", lAllTables, null ); lCompare += 
getAssociationString( "associations", pAssociations ); List<Association> lAllAssociations = new ArrayList( _schema.getAssociations() ); lAllAssociations.removeAll( pAssociations ); lCompare += getAssociationString( "none-associations", lAllAssociations ); return lCompare; } private String getTablesString( String pString, List<Table> pTables, Map<Table,String[]> pTablesColumns ) { List<String> lTableNames = new ArrayList<String>(); Map<String,Table> lTableToNAmeMap = new HashMap(); for( Table lTable : pTables ) { lTableNames.add( lTable.getName() ); lTableToNAmeMap.put( lTable.getName(), lTable ); } Collections.sort( lTableNames ); String lReturn = pString + ": "; for( String lTableName : lTableNames ) { lReturn += ","; lReturn += lTableName; if( pTablesColumns != null ) { lReturn += "("; List lColumnNames = new ArrayList( Arrays.asList( pTablesColumns.get( lTableToNAmeMap.get( lTableName ) ) ) ); Collections.sort( lColumnNames ); lReturn += lColumnNames; lReturn += ")"; } } lReturn += "\n"; return lReturn; } private String getAssociationString( String pString, List<Association> pAssociations ) { List<String> lTableNames = new ArrayList<String>(); for( Association lAssociation : pAssociations ) { lTableNames.add( lAssociation.getAssociationName() ); } Collections.sort( lTableNames ); String lReturn = pString + ": "; for( String lTableName : lTableNames ) { lReturn += ","; lReturn += lTableName; } lReturn += "\n"; return lReturn; } public void printHeaderStart( String pStyleForGraph ) { } public void printHeaderEnd() { } public void printGraph( Graph pGraph, String pCommonStyle, boolean pOutRefsOnly ) { } public void printSubGraphStartFilled( Graph pGraph, boolean pOutRefsOnly ) { } public void printSubGraphEnd() { } public void printSubGraphStartDashed( Graph pGraph, boolean pOutRefsOnly ) { } public void printGraphAssociation( GraphAssociation pGraphAssociation ) { } public void printTable( Table pTable, List<Association> pVisibleAssociation, boolean pIsOutref, 
String pStyle, List<Column> pFilteredColumns ) { if( pVisibleAssociation == null ) { _tablesFull.add( pTable ); } else { String[] lColumnNames = new String[pFilteredColumns.size()]; for( int i = 0; i < lColumnNames.length; i++ ) { lColumnNames[i] = pFilteredColumns.get( i ).getColumnName(); } _tablesRefonly.add( pTable ); _tablesRefonlyColumns.put( pTable, lColumnNames ); } } public void printAssociation( Association pAssociation, Graph pGraph ) { _associations.add( pAssociation ); } } }
// -*- mode: java; c-basic-offset: 2; -*-
// Copyright 2009-2011 Google, All Rights reserved
// Copyright 2011-2012 MIT, All rights reserved
// Released under the MIT License https://raw.github.com/mit-cml/app-inventor/master/mitlicense.txt

package com.google.appinventor.common.utils;

import com.google.common.base.CharMatcher;
import com.google.common.base.Preconditions;

import java.util.Arrays;
import java.util.Set;

/**
 * Helper class for working with strings.
 *
 */
public final class StringUtils {
  private StringUtils() {
  }

  /**
   * A {@link CharMatcher} that matches valid filename characters:
   * [0-9],[a-z],[A-Z],'_','.', and '-'
   */
  public static final CharMatcher VALID_FILENAME_CHARS = CharMatcher.inRange('0', '9')
      .or(CharMatcher.inRange('a', 'z'))
      .or(CharMatcher.inRange('A', 'Z'))
      .or(CharMatcher.is('_'))
      .or(CharMatcher.is('.'))
      .or(CharMatcher.is('-'));

  /**
   * Returns the given string enclosed with quotation marks.
   *
   * @param str string to quote
   * @return quoted string
   * @throws NullPointerException if {@code str} is {@code null}
   */
  public static String quote(String str) {
    // str.toString() looks redundant but deliberately enforces the documented
    // NullPointerException; plain '"' + str would silently yield "null".
    return '"' + str.toString() + '"';
  }

  /**
   * Returns the given quoted string without quotation marks.
   *
   * @param str quoted string
   * @return string without quotation marks
   * @throws IllegalArgumentException if {@code str} doesn't have a leading or
   *         a trailing quotation mark
   * @throws NullPointerException if {@code str} is {@code null}
   */
  public static String unquote(String str) {
    int lastIndex = str.length() - 1;
    // lastIndex <= 0 also rejects "" and the single character "\"".
    if (lastIndex <= 0 || str.charAt(0) != '"' || str.charAt(lastIndex) != '"') {
      throw new IllegalArgumentException("Attempting to unquote string without quotes!");
    }
    return str.substring(1, lastIndex);
  }

  /**
   * Creates new string to display nicely in HTML.
   *
   * @param str string to escape
   * @return escaped string or {@code null} if {@code str} was {@code null}
   */
  public static String escape(String str) {
    if (str != null) {
      // String.replace performs literal (non-regex) substitution, so no regex
      // compilation is needed. '&' must be rewritten first so the later
      // entities are not double-escaped.
      str = str.replace("&", "&amp;")
          .replace("<", "&lt;")
          .replace(">", "&gt;")
          .replace("\"", "&quot;")
          .replace("\n", "<br>");
    }
    return str;
  }

  /**
   * Indicates whether an array contains the given string.
   *
   * @param array array to check
   * @param string string to look for
   * @return {@code true} if the string was found in the array, {@code false}
   *         otherwise
   * @throws NullPointerException if either {@code array} or {@code string} is
   *         {@code null}
   */
  public static boolean contains(String[] array, String string) {
    for (String s : array) {
      if (string.equals(s)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Returns a string consisting of the joined string array elements,
   * separated by the delimiter.
   *
   * @param delimiter separates individual strings in the joined string
   * @param strings strings to join
   * @return string resulting from joining the individual strings
   */
  public static String join(String delimiter, final String[] strings) {
    return join(delimiter, Arrays.asList(strings));
  }

  /**
   * Returns a string consisting of the joined strings, separated by the
   * delimiter.
   *
   * @param delimiter separates array elements in created string
   * @param strings string elements to join
   * @return string created of joined string array elements
   */
  public static String join(String delimiter, Iterable<String> strings) {
    Preconditions.checkNotNull(delimiter);
    Preconditions.checkNotNull(strings);

    StringBuilder sb = new StringBuilder();
    // Empty separator before the first element, the delimiter afterwards —
    // avoids a trailing/leading delimiter without special-casing.
    String separator = "";
    for (String string : strings) {
      sb.append(separator);
      sb.append(string);
      separator = delimiter;
    }
    return sb.toString();
  }

  /**
   * Returns a semi-unique legal package name for a user.
   *
   * @param email the user's email address
   * @return package name
   */
  public static String userToPackageName(String email) {
    StringBuilder sb = new StringBuilder("appinventor.ai_");

    // Keep only [a-zA-Z0-9_] from the local part of the address; '.' maps to
    // '_' so the result stays a single package segment. Stop at '@'.
    int length = email.length();
    for (int i = 0; i < length; i++) {
      char ch = email.charAt(i);
      if (ch == '@') {
        break;
      }
      if ((ch >= 'a' && ch <= 'z') ||
          (ch >= 'A' && ch <= 'Z') ||
          (ch >= '0' && ch <= '9') ||
          (ch == '_')) {
        sb.append(ch);
      } else if (ch == '.') {
        sb.append('_');
      }
    }
    return sb.toString();
  }

  /**
   * Return the package for project, given the user's email address and the project name.
   *
   * @param userEmail the user's email address
   * @param projectName the project name
   * @return package name
   */
  public static String getProjectPackage(String userEmail, String projectName) {
    return userToPackageName(userEmail) + "." + projectName;
  }

  /**
   * Return the qualified name of Screen1 in a project, given the user's email
   * address and the project name.
   *
   * @param userEmail the user's email address
   * @param projectName the project name
   * @return qualified form name
   */
  public static String getQualifiedFormName(String userEmail, String projectName) {
    return getProjectPackage(userEmail, projectName) + ".Screen1";
  }

  /**
   * Returns a new String resulting from replacing the last occurrence of
   * target in string with replacement. If target does not occur in string,
   * string is returned.
   *
   * @param string the original string
   * @param target the value to be replaced
   * @param replacement the replacement value
   * @return the resulting string
   */
  public static String replaceLastOccurrence(String string, String target, String replacement) {
    if (string.length() > 0 && target.length() > 0) {
      int lastIndexOfTarget = string.lastIndexOf(target);
      if (lastIndexOfTarget != -1) {
        return string.substring(0, lastIndexOfTarget)
            + replacement
            + string.substring(lastIndexOfTarget + target.length());
      }
    }
    return string;
  }

  /**
   * Autogenerates a projectname and verifies it does not already exist in
   * {@code existingProjectNames}
   */
  public static String createProjectName(Set<String> existingProjectNames) {
    String prefix = "project";
    int highIndex = 0;
    int prefixLength = prefix.length();
    for (String name : existingProjectNames) {
      try {
        if (name.startsWith(prefix)) {
          highIndex = Math.max(highIndex, Integer.parseInt(name.substring(prefixLength)));
        }
      } catch (NumberFormatException ignored) {
        // Names like "projectFoo" simply don't participate in the numbering.
      }
    }
    return prefix + (highIndex + 1);
  }

  /**
   * Create a name safe for use in file paths from the provided String {@code
   * str}. It is fairly conservative to attempt, not guarantee, maximum
   * compatability for most operating systems.
   */
  public static String normalizeForFilename(String str) {
    String normalized = VALID_FILENAME_CHARS.retainFrom(str);
    if (!normalized.isEmpty()) {
      // Strip leading non-letters so the name starts with a letter.
      // NOTE(review): the > 2 guard stops stripping once two characters
      // remain, leaving the final charAt(0) check to reject them — confirm
      // this asymmetry is intended before changing it.
      while (normalized.length() > 2 && !CharMatcher.JAVA_LETTER.matches(normalized.charAt(0))) {
        normalized = normalized.substring(1);
      }
      if (CharMatcher.JAVA_LETTER.matches(normalized.charAt(0))) {
        return normalized;
      }
    }
    return null;
  }

  /**
   * Converts a String to a JSON String.
   * Returns null if the String is null.
   */
  public static String toJson(String s) {
    if (s == null) {
      return null;
    }
    StringBuilder sb = new StringBuilder();
    sb.append('"');
    int len = s.length();
    for (int i = 0; i < len; i++) {
      char c = s.charAt(i);
      switch (c) {
        case '\\':
        case '"':
        case '/':
          sb.append('\\').append(c);
          break;
        case '\b':
          sb.append("\\b");
          break;
        case '\f':
          sb.append("\\f");
          break;
        case '\n':
          sb.append("\\n");
          break;
        case '\r':
          sb.append("\\r");
          break;
        case '\t':
          sb.append("\\t");
          break;
        default:
          if (c < ' ' || c > '~') {
            // Replace any special chars with \u1234 unicode escape.
            String hex = "000" + Integer.toHexString(c);
            hex = hex.substring(hex.length() - 4);
            sb.append("\\u").append(hex);
          } else {
            sb.append(c);
          }
          break;
      }
    }
    sb.append('"');
    return sb.toString();
  }
}