repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
1fish2/the-blue-alliance-android | android/src/main/java/com/thebluealliance/androidclient/datafeed/retrofit/GitHubAPI.java | 539 | package com.thebluealliance.androidclient.datafeed.retrofit;
import com.google.gson.JsonArray;
import retrofit2.http.GET;
import retrofit2.http.Path;
import rx.Observable;
/**
 * Interface to access the GitHub API in relation to the app repo
 * Used in {@link com.thebluealliance.androidclient.fragments.ContributorsFragment}
 */
public interface GitHubAPI {

    /**
     * Fetches the contributor list for the given GitHub repository.
     *
     * @param user GitHub user or organization that owns the repo (fills the {user} path segment)
     * @param repo repository name (fills the {repo} path segment)
     * @return an {@link Observable} emitting the raw JSON array of contributor objects
     *         as returned by the GitHub "list contributors" endpoint
     */
    @GET("/repos/{user}/{repo}/contributors")
    Observable<JsonArray> fetchRepoContributors(
            @Path("user") String user,
            @Path("repo") String repo);
}
| mit |
007slm/kissy | tools/module-compiler/tests/com/google/javascript/jscomp/PeepholeRemoveDeadCodeTest.java | 16881 | /*
* Copyright 2004 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
/**
 * Tests for PeepholeRemoveDeadCode in isolation. Tests for the interaction
 * of multiple peephole passes are in PeepholeIntegrationTest.
 */
public class PeepholeRemoveDeadCodeTest extends CompilerTestCase {

  public PeepholeRemoveDeadCodeTest() {
    // No externs are needed for these tests.
    super("");
  }

  @Override
  public void setUp() throws Exception {
    super.setUp();
    enableLineNumberCheck(true);
  }

  @Override
  public CompilerPass getProcessor(final Compiler compiler) {
    // Run PeepholeRemoveDeadCode as the only peephole optimization,
    // so these tests exercise it in isolation.
    PeepholeOptimizationsPass peepholePass =
        new PeepholeOptimizationsPass(compiler, new PeepholeRemoveDeadCode());
    return peepholePass;
  }

  @Override
  protected int getNumRepetitions() {
    // Reduce this to 2 if we get better expression evaluators.
    return 2;
  }

  /** Asserts that the pass leaves {@code js} unchanged. */
  private void foldSame(String js) {
    testSame(js);
  }

  /** Asserts that the pass rewrites {@code js} to {@code expected}. */
  private void fold(String js, String expected) {
    test(js, expected);
  }

  public void testFoldBlock() {
    fold("{{foo()}}", "foo()");
    fold("{foo();{}}", "foo()");
    fold("{{foo()}{}}", "foo()");
    fold("{{foo()}{bar()}}", "foo();bar()");
    fold("{if(false)foo(); {bar()}}", "bar()");
    fold("{if(false)if(false)if(false)foo(); {bar()}}", "bar()");
    fold("{'hi'}", "");
    fold("{x==3}", "");
    fold("{ (function(){x++}) }", "");
    fold("function(){return;}", "function(){return;}");
    fold("function(){return 3;}", "function(){return 3}");
    fold("function(){if(x)return; x=3; return; }",
        "function(){if(x)return; x=3; return; }");
    fold("{x=3;;;y=2;;;}", "x=3;y=2");

    // Cases to test for empty block.
    fold("while(x()){x}", "while(x());");
    fold("while(x()){x()}", "while(x())x()");
    fold("for(x=0;x<100;x++){x}", "for(x=0;x<100;x++);");
    fold("for(x in y){x}", "for(x in y);");
  }

  /** Try to remove spurious blocks with multiple children */
  public void testFoldBlocksWithManyChildren() {
    fold("function f() { if (false) {} }", "function f(){}");
    fold("function f() { { if (false) {} if (true) {} {} } }",
        "function f(){}");
    fold("{var x; var y; var z; function f() { { var a; { var b; } } } }",
        "var x;var y;var z;function f(){var a;var b}");
  }

  public void testIf() {
    fold("if (1){ x=1; } else { x = 2;}", "x=1");
    fold("if (false){ x = 1; } else { x = 2; }", "x=2");
    fold("if (undefined){ x = 1; } else { x = 2; }", "x=2");
    fold("if (null){ x = 1; } else { x = 2; }", "x=2");
    fold("if (void 0){ x = 1; } else { x = 2; }", "x=2");
    fold("if (void foo()){ x = 1; } else { x = 2; }",
        "foo();x=2");
    fold("if (false){ x = 1; } else if (true) { x = 3; } else { x = 2; }",
        "x=3");
    fold("if (x){ x = 1; } else if (false) { x = 3; }",
        "if(x)x=1");
  }

  public void testHook() {
    fold("true ? a() : b()", "a()");
    fold("false ? a() : b()", "b()");
    fold("a() ? b() : true", "a() && b()");
    fold("a() ? true : b()", "a() || b()");
    fold("(a = true) ? b() : c()", "a = true; b()");
    fold("(a = false) ? b() : c()", "a = false; c()");
    fold("do {f()} while((a = true) ? b() : c())",
        "do {f()} while((a = true) , b())");
    fold("do {f()} while((a = false) ? b() : c())",
        "do {f()} while((a = false) , c())");
    fold("var x = (true) ? 1 : 0", "var x=1");
    fold("var y = (true) ? ((false) ? 12 : (cond ? 1 : 2)) : 13",
        "var y=cond?1:2");
    // "void 0" (undefined) branches must be kept: removing them would
    // change the expression's value.
    foldSame("var z=x?void 0:y()");
    foldSame("z=x?void 0:y()");
    foldSame("z*=x?void 0:y()");
    foldSame("var z=x?y():void 0");
    foldSame("(w?x:void 0).y=z");
    foldSame("(w?x:void 0).y+=z");
  }

  public void testConstantConditionWithSideEffect1() {
    fold("if (b=true) x=1;", "b=true;x=1");
    fold("if (b=/ab/) x=1;", "b=/ab/;x=1");
    fold("if (b=/ab/){ x=1; } else { x=2; }", "b=/ab/;x=1");
    fold("var b;b=/ab/;if(b)x=1;", "var b;b=/ab/;x=1");
    foldSame("var b;b=f();if(b)x=1;");
    fold("var b=/ab/;if(b)x=1;", "var b=/ab/;x=1");
    foldSame("var b=f();if(b)x=1;");
    foldSame("b=b++;if(b)x=b;");
    fold("(b=0,b=1);if(b)x=b;", "b=0;b=1;x=b;");
    fold("b=1;if(foo,b)x=b;","b=1;x=b;");
    foldSame("b=1;if(foo=1,b)x=b;");
  }

  public void testConstantConditionWithSideEffect2() {
    fold("(b=true)?x=1:x=2;", "b=true;x=1");
    fold("(b=false)?x=1:x=2;", "b=false;x=2");
    fold("if (b=/ab/) x=1;", "b=/ab/;x=1");
    fold("var b;b=/ab/;(b)?x=1:x=2;", "var b;b=/ab/;x=1");
    foldSame("var b;b=f();(b)?x=1:x=2;");
    fold("var b=/ab/;(b)?x=1:x=2;", "var b=/ab/;x=1");
    foldSame("var b=f();(b)?x=1:x=2;");
  }

  public void testVarLifting() {
    fold("if(true)var a", "var a");
    fold("if(false)var a", "var a");

    // More var lifting tests in PeepholeIntegrationTests
  }

  public void testFoldUselessWhile() {
    fold("while(false) { foo() }", "");

    fold("while(void 0) { foo() }", "");
    fold("while(undefined) { foo() }", "");

    foldSame("while(true) foo()");

    fold("while(false) { var a = 0; }", "var a");

    // Make sure it plays nice with minimizing
    fold("while(false) { foo(); continue }", "");

    fold("while(0) { foo() }", "");
  }

  public void testFoldUselessFor() {
    fold("for(;false;) { foo() }", "");
    fold("for(;void 0;) { foo() }", "");
    fold("for(;undefined;) { foo() }", "");
    fold("for(;true;) foo() ", "for(;;) foo() ");
    foldSame("for(;;) foo()");
    fold("for(;false;) { var a = 0; }", "var a");

    // Make sure it plays nice with minimizing
    fold("for(;false;) { foo(); continue }", "");
  }

  public void testFoldUselessDo() {
    fold("do { foo() } while(false);", "foo()");
    fold("do { foo() } while(void 0);", "foo()");
    fold("do { foo() } while(undefined);", "foo()");
    fold("do { foo() } while(true);", "do { foo() } while(true);");
    fold("do { var a = 0; } while(false);", "var a=0");

    // Can't fold with break or continues.
    foldSame("do { foo(); continue; } while(0)");
    foldSame("do { foo(); break; } while(0)");
  }

  public void testMinimizeWhileConstantCondition() {
    fold("while(true) foo()", "while(true) foo()");
    fold("while(0) foo()", "");
    fold("while(0.0) foo()", "");
    fold("while(NaN) foo()", "");
    fold("while(null) foo()", "");
    fold("while(undefined) foo()", "");
    fold("while('') foo()", "");
  }

  public void testFoldConstantCommaExpressions() {
    fold("if (true, false) {foo()}", "");
    fold("if (false, true) {foo()}", "foo()");
    fold("true, foo()", "foo()");
    fold("(1 + 2 + ''), foo()", "foo()");
  }

  public void testSplitCommaExpressions() {
    // Don't try to split in expressions.
    foldSame("while (foo(), true) boo()");
    foldSame("var a = (foo(), true);");
    foldSame("a = (foo(), true);");

    // Don't try to split COMMA under LABELs.
    foldSame("a:a(),b()");

    fold("(x=2), foo()", "x=2; foo()");
    fold("foo(), boo();", "foo(); boo()");
    fold("(a(), b()), (c(), d());", "a(); b(); c(); d();");
    fold("foo(), true", "foo();");
    fold("function x(){foo(), true}", "function x(){foo();}");
  }

  public void testRemoveUselessOps() {
    // There are four place where expression results are discarded:
    //  - a top level expression EXPR_RESULT
    //  - the LHS of a COMMA
    //  - the FOR init expression
    //  - the FOR increment expression

    // Known side-effect free functions calls are removed.
    fold("Math.random()", "");
    fold("Math.random(f() + g())", "f(); g();");
    fold("Math.random(f(),g(),h())", "f();g();h();");

    // Calls to functions with unknown side-effects are are left.
    foldSame("f();");
    foldSame("(function () {})();");

    // Uncalled function expressions are removed
    fold("(function () {});", "");
    fold("(function f() {});", "");
    // ... including any code they contain.
    fold("(function () {foo();});", "");

    // Useless operators are removed.
    fold("+f()", "f()");
    fold("a=(+f(),g())", "a=(f(),g())");
    fold("a=(true,g())", "a=g()");
    fold("f(),true", "f()");
    fold("f() + g()", "f();g()");

    fold("for(;;+f()){}", "for(;;f()){}");
    fold("for(+f();;g()){}", "for(f();;g()){}");
    fold("for(;;Math.random(f(),g(),h())){}", "for(;;f(),g(),h()){}");

    // The optimization cascades into conditional expressions:
    fold("g() && +f()", "g() && f()");
    fold("g() || +f()", "g() || f()");
    fold("x ? g() : +f()", "x ? g() : f()");

    fold("+x()", "x()");
    fold("+x() * 2", "x()");
    fold("-(+x() * 2)", "x()");
    fold("2 -(+x() * 2)", "x()");
    fold("x().foo", "x()");
    foldSame("x().foo()");

    // Increments/decrements and assignments have side effects, keep them.
    foldSame("x++");
    foldSame("++x");
    foldSame("x--");
    foldSame("--x");
    foldSame("x = 2");
    foldSame("x *= 2");

    // Sanity check, other expression are left alone.
    foldSame("function f() {}");
    foldSame("var x;");
  }

  public void testOptimizeSwitch() {
    fold("switch(a){}", "");
    fold("switch(foo()){}", "foo()");
    fold("switch(a){default:}", "");
    fold("switch(a){default:break;}", "");
    fold("switch(a){default:var b;break;}", "var b");
    fold("switch(a){case 1: default:}", "");
    fold("switch(a){default: case 1:}", "");
    fold("switch(a){default: break; case 1:break;}", "");
    fold("switch(a){default: var b; break; case 1: var c; break;}",
        "var c; var b;");

    // Can't remove cases if a default exists.
    foldSame("function f() {switch(a){default: return; case 1: break;}}");
    foldSame("function f() {switch(a){case 1: foo();}}");
    foldSame("function f() {switch(a){case 3: case 2: case 1: foo();}}");

    fold("function f() {switch(a){case 2: case 1: default: foo();}}",
        "function f() {switch(a){default: foo();}}");
    fold("switch(a){case 1: default:break; case 2: foo()}",
        "switch(a){case 2: foo()}");
    foldSame("switch(a){case 1: goo(); default:break; case 2: foo()}");

    // TODO(johnlenz): merge the useless "case 2"
    foldSame("switch(a){case 1: goo(); case 2:break; case 3: foo()}");

    // Can't remove cases if something useful is done.
    foldSame("switch(a){case 1: var c =2; break;}");
    foldSame("function f() {switch(a){case 1: return;}}");
    foldSame("x:switch(a){case 1: break x;}");
  }

  public void testRemoveNumber() {
    test("3", "");
  }

  public void testRemoveVarGet1() {
    test("a", "");
  }

  public void testRemoveVarGet2() {
    test("var a = 1;a", "var a = 1");
  }

  public void testRemoveNamespaceGet1() {
    test("var a = {};a.b", "var a = {}");
  }

  public void testRemoveNamespaceGet2() {
    test("var a = {};a.b=1;a.b", "var a = {};a.b=1");
  }

  public void testRemovePrototypeGet1() {
    test("var a = {};a.prototype.b", "var a = {}");
  }

  public void testRemovePrototypeGet2() {
    test("var a = {};a.prototype.b = 1;a.prototype.b",
        "var a = {};a.prototype.b = 1");
  }

  public void testRemoveAdd1() {
    test("1 + 2", "");
  }

  public void testNoRemoveVar1() {
    testSame("var a = 1");
  }

  public void testNoRemoveVar2() {
    testSame("var a = 1, b = 2");
  }

  public void testNoRemoveAssign1() {
    testSame("a = 1");
  }

  public void testNoRemoveAssign2() {
    testSame("a = b = 1");
  }

  public void testNoRemoveAssign3() {
    test("1 + (a = 2)", "a = 2");
  }

  public void testNoRemoveAssign4() {
    testSame("x.a = 1");
  }

  public void testNoRemoveAssign5() {
    testSame("x.a = x.b = 1");
  }

  public void testNoRemoveAssign6() {
    test("1 + (x.a = 2)", "x.a = 2");
  }

  public void testNoRemoveCall1() {
    testSame("a()");
  }

  public void testNoRemoveCall2() {
    test("a()+b()", "a();b()");
  }

  public void testNoRemoveCall3() {
    testSame("a() && b()");
  }

  public void testNoRemoveCall4() {
    testSame("a() || b()");
  }

  public void testNoRemoveCall5() {
    test("a() || 1", "a()");
  }

  public void testNoRemoveCall6() {
    testSame("1 || a()");
  }

  public void testNoRemoveThrow1() {
    testSame("function f(){throw a()}");
  }

  public void testNoRemoveThrow2() {
    testSame("function f(){throw a}");
  }

  public void testNoRemoveThrow3() {
    testSame("function f(){throw 10}");
  }

  public void testRemoveInControlStructure1() {
    test("if(x()) 1", "x()");
  }

  public void testRemoveInControlStructure2() {
    test("while(2) 1", "while(2);");
  }

  public void testRemoveInControlStructure3() {
    test("for(1;2;3) 4", "for(;;);");
  }

  public void testHook1() {
    test("1 ? 2 : 3", "");
  }

  public void testHook2() {
    test("x ? a() : 3", "x && a()");
  }

  public void testHook3() {
    test("x ? 2 : a()", "x || a()");
  }

  public void testHook4() {
    testSame("x ? a() : b()");
  }

  public void testHook5() {
    test("a() ? 1 : 2", "a()");
  }

  public void testHook6() {
    test("a() ? b() : 2", "a() && b()");
  }

  // TODO(johnlenz): Consider adding a post optimization pass to
  // convert OR into HOOK to save parentheses when the operator
  // precedents would require them.
  public void testHook7() {
    test("a() ? 1 : b()", "a() || b()");
  }

  public void testHook8() {
    testSame("a() ? b() : c()");
  }

  public void testShortCircuit1() {
    testSame("1 && a()");
  }

  public void testShortCircuit2() {
    test("1 && a() && 2", "1 && a()");
  }

  public void testShortCircuit3() {
    test("a() && 1 && 2", "a()");
  }

  public void testShortCircuit4() {
    testSame("a() && 1 && b()");
  }

  public void testComma1() {
    test("1, 2", "");
  }

  public void testComma2() {
    test("1, a()", "a()");
  }

  public void testComma3() {
    test("1, a(), b()", "a();b()");
  }

  public void testComma4() {
    test("a(), b()", "a();b()");
  }

  public void testComma5() {
    test("a(), b(), 1", "a();b()");
  }

  public void testComplex1() {
    test("1 && a() + b() + c()", "1 && (a(), b(), c())");
  }

  public void testComplex2() {
    test("1 && (a() ? b() : 1)", "1 && a() && b()");
  }

  public void testComplex3() {
    test("1 && (a() ? b() : 1 + c())", "1 && (a() ? b() : c())");
  }

  public void testComplex4() {
    test("1 && (a() ? 1 : 1 + c())", "1 && (a() || c())");
  }

  public void testComplex5() {
    // can't simplify lhs of short circuit statements with side effects
    testSame("(a() ? 1 : 1 + c()) && foo()");
  }

  public void testNoRemoveFunctionDeclaration1() {
    testSame("function foo(){}");
  }

  public void testNoRemoveFunctionDeclaration2() {
    testSame("var foo = function (){}");
  }

  public void testNoSimplifyFunctionArgs1() {
    testSame("f(1 + 2, 3 + g())");
  }

  public void testNoSimplifyFunctionArgs2() {
    testSame("1 && f(1 + 2, 3 + g())");
  }

  public void testNoSimplifyFunctionArgs3() {
    testSame("1 && foo(a() ? b() : 1 + c())");
  }

  public void testNoRemoveInherits1() {
    testSame("var a = {}; this.b = {}; var goog = {}; goog.inherits(b, a)");
  }

  public void testNoRemoveInherits2() {
    test("var a = {}; this.b = {}; var goog = {}; goog.inherits(b, a) + 1",
        "var a = {}; this.b = {}; var goog = {}; goog.inherits(b, a)");
  }

  public void testNoRemoveInherits3() {
    testSame("this.a = {}; var b = {}; b.inherits(a);");
  }

  public void testNoRemoveInherits4() {
    test("this.a = {}; var b = {}; b.inherits(a) + 1;",
        "this.a = {}; var b = {}; b.inherits(a)");
  }

  public void testRemoveFromLabel1() {
    test("LBL: void 0", "LBL: {}");
  }

  public void testRemoveFromLabel2() {
    test("LBL: foo() + 1 + bar()", "LBL: foo(),bar()");
  }

  public void testCall1() {
    test("Math.sin(0);", "");
  }

  public void testCall2() {
    test("1 + Math.sin(0);", "");
  }

  public void testNew1() {
    test("new Date;", "");
  }

  public void testNew2() {
    test("1 + new Date;", "");
  }

  public void testFoldAssign() {
    test("x=x", "");
    testSame("x=xy");
    testSame("x=x + 1");
    testSame("x.a=x.a");
    test("var y=(x=x)", "var y=x");
    test("y=1 + (x=x)", "y=1 + x");
  }

  public void testTryCatchFinally() {
    testSame("try {foo()} catch (e) {bar()}");
    testSame("try { try {foo()} catch (e) {bar()}} catch (x) {bar()}");
    test("try {var x = 1} finally {}", "var x = 1;");
    testSame("try {var x = 1} finally {x()}");
    test("function f() { return; try{var x = 1}finally{} }",
        "function f() { return; var x = 1; }");
  }
}
| mit |
johnsteele/windup | rules-java/tests/src/test/java/org/jboss/windup/rules/apps/java/ClassMetadataTest.java | 5150 | package org.jboss.windup.rules.apps.java;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import javax.inject.Inject;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.RandomStringUtils;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.forge.arquillian.AddonDependencies;
import org.jboss.forge.arquillian.AddonDependency;
import org.jboss.forge.arquillian.archive.AddonArchive;
import org.jboss.forge.furnace.util.Predicate;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.windup.config.RuleProvider;
import org.jboss.windup.config.phase.MigrationRulesPhase;
import org.jboss.windup.engine.predicates.EnumeratedRuleProviderPredicate;
import org.jboss.windup.engine.predicates.RuleProviderWithDependenciesPredicate;
import org.jboss.windup.exec.WindupProcessor;
import org.jboss.windup.exec.configuration.WindupConfiguration;
import org.jboss.windup.exec.rulefilters.AndPredicate;
import org.jboss.windup.exec.rulefilters.NotPredicate;
import org.jboss.windup.graph.GraphContext;
import org.jboss.windup.graph.GraphContextFactory;
import org.jboss.windup.rules.apps.java.config.ScanPackagesOption;
import org.jboss.windup.rules.apps.java.config.SourceModeOption;
import org.jboss.windup.rules.apps.java.decompiler.DecompileClassesRuleProvider;
import org.jboss.windup.rules.apps.java.model.JavaClassModel;
import org.jboss.windup.rules.apps.java.service.JavaClassService;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
 * Verifies that Windup's Java scanning extracts correct class metadata (superclass,
 * implemented interfaces, interface-ness) from both .java sources and compiled .class files.
 *
 * @author <a href="mailto:jesse.sightler@gmail.com">Jesse Sightler</a>
 */
@RunWith(Arquillian.class)
public class ClassMetadataTest
{
    @Deployment
    @AddonDependencies({
                @AddonDependency(name = "org.jboss.windup.config:windup-config"),
                @AddonDependency(name = "org.jboss.windup.exec:windup-exec"),
                @AddonDependency(name = "org.jboss.windup.rules.apps:windup-rules-java"),
                @AddonDependency(name = "org.jboss.windup.reporting:windup-reporting"),
                @AddonDependency(name = "org.jboss.windup.utils:windup-utils"),
                @AddonDependency(name = "org.jboss.forge.furnace.container:cdi")
    })
    public static AddonArchive getDeployment()
    {
        return ShrinkWrap.create(AddonArchive.class).addBeansXML();
    }

    @Inject
    private WindupProcessor processor;

    @Inject
    private GraphContextFactory factory;

    // Same assertions, two input forms: .java sources vs compiled .class files.
    @Test
    public void testJavaSourceScanning() throws Exception
    {
        runTest("src/test/resources/classmetadatatest/src");
    }

    @Test
    public void testJavaClassFiles() throws Exception
    {
        runTest("src/test/resources/classmetadatatest/sampleclasses");
    }

    /**
     * Runs Windup over {@code inputPath} in source mode (decompilation excluded) and
     * asserts that com.rhc.booking.services.EventServer is recognized as an interface
     * extending exactly java.rmi.Remote.
     *
     * @param inputPath path to the Java input (sources or classes) to scan
     */
    private void runTest(String inputPath) throws Exception
    {
        try (GraphContext context = factory.create(getDefaultPath(), true))
        {
            // Fresh, randomized temp output directory so runs don't interfere.
            final Path outputPath = Paths.get(FileUtils.getTempDirectory().toString(),
                        "windup_" + RandomStringUtils.randomAlphanumeric(6));
            FileUtils.deleteDirectory(outputPath.toFile());
            Files.createDirectories(outputPath);

            final WindupConfiguration processorConfig = new WindupConfiguration();
            processorConfig.setOptionValue(SourceModeOption.NAME, true);
            // Run migration rules and their dependencies, but skip the decompiler
            // (this test must work directly from the given input form).
            Predicate<RuleProvider> ruleFilter = new AndPredicate(new RuleProviderWithDependenciesPredicate(MigrationRulesPhase.class),
                        new NotPredicate(new EnumeratedRuleProviderPredicate(DecompileClassesRuleProvider.class)));
            processorConfig.setRuleProviderFilter(ruleFilter);
            processorConfig.setGraphContext(context);
            processorConfig.addInputPath(Paths.get(inputPath));
            processorConfig.setOutputDirectory(outputPath);
            // Empty package filter = scan everything.
            processorConfig.setOptionValue(ScanPackagesOption.NAME, Collections.singletonList(""));

            processor.execute(processorConfig);

            JavaClassService javaClassService = new JavaClassService(context);
            JavaClassModel javaClassModel = javaClassService.getByName("com.rhc.booking.services.EventServer");
            Assert.assertNotNull(javaClassModel);
            Assert.assertNull(javaClassModel.getExtends());
            Assert.assertNotNull(javaClassModel.getInterfaces());
            Assert.assertTrue(javaClassModel.getInterfaces().iterator().hasNext());
            Assert.assertTrue(javaClassModel.isInterface());

            // EventServer must implement exactly one interface: java.rmi.Remote.
            int interfaceCountFound = 0;
            for (JavaClassModel interfce : javaClassModel.getInterfaces())
            {
                interfaceCountFound++;
                Assert.assertEquals("java.rmi.Remote", interfce.getQualifiedName());
            }
            Assert.assertEquals(1, interfaceCountFound);
        }
    }

    /** Returns a randomized graph directory under the system temp dir. */
    private Path getDefaultPath()
    {
        return FileUtils.getTempDirectory().toPath().resolve("Windup")
                    .resolve("windupgraph_classmetadatatest_" + RandomStringUtils.randomAlphanumeric(6));
    }
}
| epl-1.0 |
TypeFox/che | plugins/plugin-java/che-plugin-java-ext-jdt/org-eclipse-jdt-ui/src/main/java/org/eclipse/jdt/internal/ui/text/template/contentassist/TemplateProposal.java | 21548 | /**
* ***************************************************************************** Copyright (c) 2000,
* 2013 IBM Corporation and others. All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0 which accompanies this
* distribution, and is available at http://www.eclipse.org/legal/epl-v10.html
*
* <p>Contributors: IBM Corporation - initial API and implementation
* *****************************************************************************
*/
package org.eclipse.jdt.internal.ui.text.template.contentassist;
import static org.eclipse.che.plugin.java.server.dto.DtoServerImpls.LinkedDataImpl;
import static org.eclipse.che.plugin.java.server.dto.DtoServerImpls.LinkedModeModelImpl;
import static org.eclipse.che.plugin.java.server.dto.DtoServerImpls.LinkedPositionGroupImpl;
import static org.eclipse.che.plugin.java.server.dto.DtoServerImpls.RegionImpl;
import org.eclipse.che.ide.ext.java.shared.dto.LinkedModeModel;
import org.eclipse.che.jdt.JavadocFinder;
import org.eclipse.che.jdt.javadoc.HTMLPrinter;
import org.eclipse.che.jdt.javaeditor.HasLinkedModel;
import org.eclipse.che.jface.text.ITextViewer;
import org.eclipse.che.jface.text.contentassist.ICompletionProposal;
import org.eclipse.che.jface.text.contentassist.ICompletionProposalExtension2;
import org.eclipse.che.jface.text.contentassist.ICompletionProposalExtension3;
import org.eclipse.che.jface.text.contentassist.ICompletionProposalExtension4;
import org.eclipse.che.jface.text.contentassist.ICompletionProposalExtension6;
import org.eclipse.che.jface.text.contentassist.IContextInformation;
import org.eclipse.che.jface.text.link.ProposalPosition;
import org.eclipse.che.jface.text.source.LineRange;
import org.eclipse.core.runtime.Assert;
import org.eclipse.jdt.internal.corext.template.java.CompilationUnitContext;
import org.eclipse.jdt.internal.corext.template.java.JavaDocContext;
import org.eclipse.jdt.internal.corext.util.Messages;
import org.eclipse.jdt.internal.ui.JavaPlugin;
import org.eclipse.jdt.internal.ui.javaeditor.IndentUtil;
import org.eclipse.jdt.ui.text.java.IJavaCompletionProposal;
import org.eclipse.jface.text.BadLocationException;
import org.eclipse.jface.text.BadPositionCategoryException;
import org.eclipse.jface.text.Document;
import org.eclipse.jface.text.DocumentEvent;
import org.eclipse.jface.text.IDocument;
import org.eclipse.jface.text.IRegion;
import org.eclipse.jface.text.Position;
import org.eclipse.jface.text.Region;
import org.eclipse.jface.text.link.LinkedPosition;
import org.eclipse.jface.text.templates.DocumentTemplateContext;
import org.eclipse.jface.text.templates.GlobalTemplateVariables;
import org.eclipse.jface.text.templates.Template;
import org.eclipse.jface.text.templates.TemplateBuffer;
import org.eclipse.jface.text.templates.TemplateContext;
import org.eclipse.jface.text.templates.TemplateException;
import org.eclipse.jface.text.templates.TemplateVariable;
import org.eclipse.jface.viewers.StyledCellLabelProvider;
import org.eclipse.jface.viewers.StyledString;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.Point;
/** A template proposal. */
public class TemplateProposal
implements IJavaCompletionProposal,
ICompletionProposalExtension2,
ICompletionProposalExtension3,
ICompletionProposalExtension4,
ICompletionProposalExtension6,
HasLinkedModel {
// The template this proposal will insert.
private final Template fTemplate;
// Context in which the template was requested (provides document/offsets).
private final TemplateContext fContext;
// Icon shown in the completion popup; may be null.
private final Image fImage;
// Region of the document this proposal replaces.
private final IRegion fRegion;
// Relevance score used to rank this proposal against core proposals.
private int fRelevance;

private IRegion fSelectedRegion; // initialized by apply()
// Lazily computed styled display string; null until first requested.
private StyledString fDisplayString;
// Position updater for this proposal's private position category.
private InclusivePositionUpdater fUpdater;
// Linked-mode model built during apply(); exposed via HasLinkedModel.
private LinkedModeModel linkedModeModel;
/**
 * Creates a template proposal with a template and its context.
 *
 * @param template the template; must not be <code>null</code>
 * @param context the context in which the template was requested; must not be <code>null</code>
 * @param region the region this proposal is applied to; must not be <code>null</code>
 * @param image the icon of the proposal, or <code>null</code> for none
 */
public TemplateProposal(Template template, TemplateContext context, IRegion region, Image image) {
  Assert.isNotNull(template);
  Assert.isNotNull(context);
  Assert.isNotNull(region);

  fTemplate = template;
  fContext = context;
  fImage = image;
  fRegion = region;

  // Display string is computed lazily; relevance is fixed at construction time.
  fDisplayString = null;

  fRelevance = computeRelevance();
}
/**
 * Computes the relevance to match the relevance values generated by the core content assistant.
 *
 * <p>The base score mirrors the constants in
 * org.eclipse.jdt.internal.codeassist.RelevanceConstants and is boosted when the
 * typed prefix matches the template name (case-sensitively or exactly), and for
 * Javadoc inline-tag contexts.
 *
 * @return a sensible relevance value.
 */
private int computeRelevance() {
  // see org.eclipse.jdt.internal.codeassist.RelevanceConstants
  final int R_DEFAULT = 0;
  final int R_INTERESTING = 5;
  final int R_CASE = 10;
  final int R_NON_RESTRICTED = 3;
  final int R_EXACT_NAME = 4;
  final int R_INLINE_TAG = 31;

  int base = R_DEFAULT + R_INTERESTING + R_NON_RESTRICTED;

  try {
    if (fContext instanceof DocumentTemplateContext) {
      DocumentTemplateContext templateContext = (DocumentTemplateContext) fContext;
      IDocument document = templateContext.getDocument();

      // The text the user has typed so far, i.e. the prefix being completed.
      String content = document.get(fRegion.getOffset(), fRegion.getLength());
      if (content.length() > 0 && fTemplate.getName().startsWith(content)) base += R_CASE;
      if (fTemplate.getName().equalsIgnoreCase(content)) base += R_EXACT_NAME;
      if (fContext instanceof JavaDocContext) base += R_INLINE_TAG;
    }
  } catch (BadLocationException e) {
    // ignore - not a case sensitive match then
  }

  // see CompletionProposalCollector.computeRelevance
  // just under keywords, but better than packages
  final int TEMPLATE_RELEVANCE = 1;
  return base * 16 + TEMPLATE_RELEVANCE;
}
/**
 * Returns the template of this proposal.
 *
 * @return the template of this proposal; never <code>null</code>
 * @since 3.1
 */
public final Template getTemplate() {
  return fTemplate;
}
/**
 * Returns the context in which the template was requested.
 *
 * @return the context in which the template was requested; never <code>null</code>
 * @since 3.1
 */
protected final TemplateContext getContext() {
  return fContext;
}
/**
 * {@inheritDoc}
 *
 * <p>Intentionally a no-op: the framework invokes the extension method instead.
 *
 * @deprecated This method is no longer called by the framework and clients should overwrite
 *     {@link #apply(ITextViewer, char, int, int)} instead
 */
public final void apply(IDocument document) {
  // not called anymore
}
/*
 * @see org.eclipse.jface.text.contentassist.ICompletionProposalExtension2#apply(org.eclipse.jface.text.ITextViewer, char, int, int)
 *
 * Evaluates the template into the document, replacing the proposal's region, then
 * builds a serializable linked-mode model (groups of editable positions plus
 * value choices) from the template's ambiguous variables. On success,
 * fSelectedRegion and linkedModeModel describe the resulting editing state.
 */
public void apply(ITextViewer viewer, char trigger, int stateMask, int offset) {
  IDocument document = viewer.getDocument();
  try {
    fContext.setReadOnly(false);
    int start;
    TemplateBuffer templateBuffer;
    try {
      beginCompoundChange(viewer);

      int oldReplaceOffset = getReplaceOffset();
      try {
        // this may already modify the document (e.g. add imports)
        templateBuffer = fContext.evaluate(fTemplate);
      } catch (TemplateException e1) {
        // Template could not be evaluated; leave the document untouched.
        fSelectedRegion = fRegion;
        return;
      }

      start = getReplaceOffset();
      // Evaluation may have shifted the replace range (e.g. inserted imports above).
      int shift = start - oldReplaceOffset;
      int end = Math.max(getReplaceEndOffset(), offset + shift);

      // insert template string
      if (end > document.getLength()) end = offset;
      String templateString = templateBuffer.getString();
      document.replace(start, end - start, templateString);
    } finally {
      endCompoundChange(viewer);
    }

    // translate positions
    LinkedModeModelImpl model = new LinkedModeModelImpl();
    TemplateVariable[] variables = templateBuffer.getVariables();

    MultiVariableGuess guess =
        fContext instanceof CompilationUnitContext
            ? ((CompilationUnitContext) fContext).getMultiVariableGuess()
            : null;

    boolean hasPositions = false;
    for (int i = 0; i != variables.length; i++) {
      TemplateVariable variable = variables[i];

      // Variables with a single, unambiguous value need no linked editing.
      if (variable.isUnambiguous()) continue;

      LinkedPositionGroupImpl group = new LinkedPositionGroupImpl();

      int[] offsets = variable.getOffsets();
      int length = variable.getLength();

      LinkedPosition first;
      if (guess != null && variable instanceof MultiVariable) {
        first =
            new VariablePosition(
                document, offsets[0] + start, length, guess, (MultiVariable) variable);
        guess.addSlave((VariablePosition) first);
      } else {
        String[] values = variable.getValues();
        ICompletionProposal[] proposals = new ICompletionProposal[values.length];
        for (int j = 0; j < values.length; j++) {
          // ensurePositionCategoryInstalled(document, model);
          Position pos = new Position(offsets[0] + start, length);
          // document.addPosition(getCategory(), pos);
          proposals[j] = new PositionBasedCompletionProposal(values[j], pos, length);
        }

        if (proposals.length > 1)
          first = new ProposalPosition(document, offsets[0] + start, length, proposals);
        else first = new LinkedPosition(document, offsets[0] + start, length);
      }

      // The first occurrence carries the proposal choices (if any); the remaining
      // occurrences become plain slave regions of the same group.
      for (int j = 0; j != offsets.length; j++)
        if (j == 0) {
          if (first instanceof ProposalPosition) {
            RegionImpl region = new RegionImpl();
            region.setLength(first.getLength());
            region.setOffset(first.getOffset());
            LinkedDataImpl data = new LinkedDataImpl();
            ICompletionProposal[] choices = ((ProposalPosition) first).getChoices();
            if (choices != null) {
              for (ICompletionProposal choice : choices) {
                data.addValues(choice.getDisplayString());
              }
              group.setData(data);
            }
            group.addPositions(region);
          } else {
            RegionImpl region = new RegionImpl();
            region.setLength(first.getLength());
            region.setOffset(first.getOffset());
            group.addPositions(region);
          }
        } else {
          RegionImpl region = new RegionImpl();
          region.setLength(length);
          region.setOffset(offsets[j] + start);
          group.addPositions(region);
        }

      model.addGroups(group);
      hasPositions = true;
    }

    if (hasPositions) {
      model.setEscapePosition(getCaretOffset(templateBuffer) + start);
      this.linkedModeModel = model;
      // model.forceInstall();
      // JavaEditor editor= getJavaEditor();
      // if (editor != null) {
      // model.addLinkingListener(new EditorHighlightingSynchronizer(editor));
      // }
      //
      // LinkedModeUI ui= new EditorLinkedModeUI(model, viewer);
      // ui.setExitPosition(viewer, getCaretOffset(templateBuffer) + start, 0, Integer.MAX_VALUE);
      // ui.enter();
      fSelectedRegion = fRegion; //ui.getSelectedRegion();
    } else {
      fSelectedRegion = new Region(getCaretOffset(templateBuffer) + start, 0);
    }
  } catch (BadLocationException e) {
    JavaPlugin.log(e);
    // openErrorDialog(viewer.getTextWidget().getShell(), e);
    fSelectedRegion = fRegion;
  }
}
// Intentionally a no-op in this (server-side) port: the SWT rewrite-target
// machinery of the original desktop implementation is kept below for reference.
private void endCompoundChange(ITextViewer viewer) {
  // if (viewer instanceof ITextViewerExtension) {
  // ITextViewerExtension extension= (ITextViewerExtension) viewer;
  // IRewriteTarget target= extension.getRewriteTarget();
  // target.endCompoundChange();
  // }
}
// Intentionally a no-op in this (server-side) port; counterpart of
// endCompoundChange(). Original desktop code kept below for reference.
private void beginCompoundChange(ITextViewer viewer) {
  // if (viewer instanceof ITextViewerExtension) {
  // ITextViewerExtension extension= (ITextViewerExtension) viewer;
  // IRewriteTarget target= extension.getRewriteTarget();
  // target.beginCompoundChange();
  // }
}
// /**
// * Returns the currently active java editor, or <code>null</code> if it
// * cannot be determined.
// *
// * @return the currently active java editor, or <code>null</code>
// */
// private JavaEditor getJavaEditor() {
// IEditorPart part= JavaPlugin.getActivePage().getActiveEditor();
// if (part instanceof JavaEditor)
// return (JavaEditor) part;
// else
// return null;
// }
// private void ensurePositionCategoryInstalled(final IDocument document, LinkedModeModel model) {
// if (!document.containsPositionCategory(getCategory())) {
// document.addPositionCategory(getCategory());
// fUpdater= new InclusivePositionUpdater(getCategory());
// document.addPositionUpdater(fUpdater);
//
// model.addLinkingListener(new ILinkedModeListener() {
//
// /*
// * @see org.eclipse.jface.text.link.ILinkedModeListener#left(org.eclipse.jface.text.link.LinkedModeModel, int)
// */
// public void left(LinkedModeModel environment, int flags) {
// ensurePositionCategoryRemoved(document);
// }
//
// public void suspend(LinkedModeModel environment) {}
// public void resume(LinkedModeModel environment, int flags) {}
// });
// }
// }
/**
 * Removes this proposal's position category and its position updater from
 * the given document, if the category is still installed.
 *
 * @param document the document to clean up
 */
private void ensurePositionCategoryRemoved(IDocument document) {
    String category = getCategory();
    if (!document.containsPositionCategory(category)) {
        return;
    }
    try {
        document.removePositionCategory(category);
    } catch (BadPositionCategoryException e) {
        // category vanished concurrently - nothing left to remove
    }
    document.removePositionUpdater(fUpdater);
}
/**
 * Returns the position category name for this proposal, derived from
 * toString().
 * NOTE(review): toString() is overridden further down to include the
 * display string, so this value may not be unique per instance - verify.
 */
private String getCategory() {
return "TemplateProposalCategory_" + toString(); //$NON-NLS-1$
}
/**
 * Determines the caret position after insertion: the first offset of the
 * template's cursor variable if one exists, otherwise the end of the
 * evaluated buffer.
 *
 * @param buffer the evaluated template buffer
 * @return the caret offset, relative to the start of the buffer
 */
private int getCaretOffset(TemplateBuffer buffer) {
    for (TemplateVariable variable : buffer.getVariables()) {
        if (variable.getType().equals(GlobalTemplateVariables.Cursor.NAME)) {
            return variable.getOffsets()[0];
        }
    }
    return buffer.getString().length();
}
/**
 * Returns the offset of the range in the document that will be replaced by
 * applying this template.
 *
 * @return the offset of the range in the document that will be replaced by
 *         applying this template
 */
protected final int getReplaceOffset() {
    if (fContext instanceof DocumentTemplateContext) {
        return ((DocumentTemplateContext) fContext).getStart();
    }
    return fRegion.getOffset();
}
/**
 * Returns the end offset of the range in the document that will be replaced
 * by applying this template.
 *
 * @return the end offset of the range in the document that will be replaced
 *         by applying this template
 */
protected final int getReplaceEndOffset() {
    if (fContext instanceof DocumentTemplateContext) {
        return ((DocumentTemplateContext) fContext).getEnd();
    }
    return fRegion.getOffset() + fRegion.getLength();
}
/*
 * Returns the region that should be selected after this proposal has been
 * applied; fSelectedRegion is computed during apply().
 *
 * @see ICompletionProposal#getSelection(IDocument)
 */
public Point getSelection(IDocument document) {
return new Point(fSelectedRegion.getOffset(), fSelectedRegion.getLength());
}
/*
 * Evaluates the template in read-only mode and renders the result as an
 * HTML page for the additional-info popup. Returns null if the template
 * cannot be evaluated or a location error occurs.
 *
 * @see ICompletionProposal#getAdditionalProposalInfo()
 */
public String getAdditionalProposalInfo() {
try {
// evaluate without modifying the underlying document
fContext.setReadOnly(true);
TemplateBuffer templateBuffer;
try {
templateBuffer = fContext.evaluate(fTemplate);
} catch (TemplateException e) {
return null;
}
// re-indent the evaluated text before rendering it as HTML
IDocument document = new Document(templateBuffer.getString());
IndentUtil.indentLines(document, new LineRange(0, document.getNumberOfLines()), null, null);
StringBuffer buffer = new StringBuffer();
HTMLPrinter.insertPageProlog(buffer, 0, JavadocFinder.getStyleSheet());
HTMLPrinter.addParagraph(buffer, document.get());
HTMLPrinter.addPageEpilog(buffer);
return buffer.toString();
} catch (BadLocationException e) {
// handleException(
// JavaPlugin.getActiveWorkbenchShell(), new CoreException(new Status(IStatus.ERROR, JavaPlugin.getPluginId(), IStatus.OK, "", e))); //$NON-NLS-1$
JavaPlugin.log(e);
return null;
}
}
/*
 * Delegates to the styled display string and returns its plain text.
 *
 * @see ICompletionProposal#getDisplayString()
 */
public String getDisplayString() {
return getStyledDisplayString().getString();
}
/*
 * Lazily builds the styled label from the template's name and description.
 * NOTE(review): the lazy initialization is unsynchronized - assumed to be
 * called from the UI thread only; confirm.
 *
 * @see org.eclipse.jface.text.contentassist.ICompletionProposalExtension6#getStyledDisplayString()
 * @since 3.4
 */
public StyledString getStyledDisplayString() {
if (fDisplayString == null) {
String[] arguments = new String[] {fTemplate.getName(), fTemplate.getDescription()};
String decorated =
Messages.format(TemplateContentAssistMessages.TemplateProposal_displayString, arguments);
StyledString string = new StyledString(fTemplate.getName(), StyledString.COUNTER_STYLER);
fDisplayString =
StyledCellLabelProvider.styleDecoratedString(
decorated, StyledString.QUALIFIER_STYLER, string);
}
return fDisplayString;
}
/** Sets the styled display string returned by getStyledDisplayString(). */
public void setDisplayString(StyledString displayString) {
fDisplayString = displayString;
}
/*
 * Returns the image shown next to this proposal, as passed at construction.
 *
 * @see ICompletionProposal#getImage()
 */
public Image getImage() {
return fImage;
}
/*
 * Template proposals provide no context information.
 *
 * @see ICompletionProposal#getContextInformation()
 */
public IContextInformation getContextInformation() {
return null;
}
// private void openErrorDialog(Shell shell, Exception e) {
// MessageDialog.openError(shell, TemplateContentAssistMessages.TemplateEvaluator_error_title, e.getMessage());
// }
// private void handleException(Shell shell, CoreException e) {
// ExceptionHandler.handle(e, shell, TemplateContentAssistMessages.TemplateEvaluator_error_title, null);
// }
/*
 * Returns this proposal's relevance value.
 *
 * @see IJavaCompletionProposal#getRelevance()
 */
public int getRelevance() {
return fRelevance;
}
/** Sets this proposal's relevance value. */
public void setRelevance(int relevance) {
fRelevance = relevance;
}
// /*
// * @see org.eclipse.jface.text.contentassist.ICompletionProposalExtension3#getInformationControlCreator()
// */
// public IInformationControlCreator getInformationControlCreator() {
// int orientation;
// IEditorPart editor= getJavaEditor();
// if (editor instanceof IWorkbenchPartOrientation)
// orientation= ((IWorkbenchPartOrientation)editor).getOrientation();
// else
// orientation= SWT.LEFT_TO_RIGHT;
// return new TemplateInformationControlCreator(orientation);
// }
/*
 * No-op: this proposal does not react to being selected in the popup.
 *
 * @see org.eclipse.jface.text.contentassist.ICompletionProposalExtension2#selected(org.eclipse.jface.text.ITextViewer, boolean)
 */
public void selected(ITextViewer viewer, boolean smartToggle) {}
/*
 * No-op: this proposal does not react to being unselected in the popup.
 *
 * @see org.eclipse.jface.text.contentassist.ICompletionProposalExtension2#unselected(org.eclipse.jface.text.ITextViewer)
 */
public void unselected(ITextViewer viewer) {}
/*
 * Checks whether the text typed since the replace offset is still a
 * case-insensitive prefix of this template's name. In Javadoc context a
 * template name starting with '<' also matches when the user omitted the
 * leading '<'.
 *
 * @see org.eclipse.jface.text.contentassist.ICompletionProposalExtension2#validate(org.eclipse.jface.text.IDocument, int, org.eclipse.jface.text.DocumentEvent)
 */
public boolean validate(IDocument document, int offset, DocumentEvent event) {
    try {
        int replaceOffset = getReplaceOffset();
        if (offset < replaceOffset) {
            return false;
        }
        String typed = document.get(replaceOffset, offset - replaceOffset).toLowerCase();
        String templateName = fTemplate.getName().toLowerCase();
        if (templateName.startsWith(typed)) {
            return true;
        }
        // Javadoc templates may begin with '<'; accept a match skipping it.
        return fContext instanceof JavaDocContext
                && templateName.startsWith("<") //$NON-NLS-1$
                && templateName.startsWith(typed, 1);
    } catch (BadLocationException e) {
        // concurrent modification - treat the proposal as invalid
        return false;
    }
}
/*
 * Returns the text used for prefix completion: empty for selection
 * templates (bug 114360 - they must not be prefix-completable), otherwise
 * the template name.
 *
 * @see org.eclipse.jface.text.contentassist.ICompletionProposalExtension3#getReplacementString()
 */
public CharSequence getPrefixCompletionText(IDocument document, int completionOffset) {
    return isSelectionTemplate() ? "" : fTemplate.getName(); //$NON-NLS-1$
}
/*
 * Prefix completion starts at the replace offset of this proposal.
 *
 * @see org.eclipse.jface.text.contentassist.ICompletionProposalExtension3#getReplacementOffset()
 */
public int getPrefixCompletionStart(IDocument document, int completionOffset) {
return getReplaceOffset();
}
/*
 * Selection templates are never auto-insertable; otherwise defer to the
 * template's own setting.
 *
 * @see org.eclipse.jface.text.contentassist.ICompletionProposalExtension4#isAutoInsertable()
 */
public boolean isAutoInsertable() {
    return !isSelectionTemplate() && fTemplate.isAutoInsertable();
}
/**
 * Returns <code>true</code> if the proposal has a selection, e.g. will wrap
 * some code.
 *
 * @return <code>true</code> if the proposal's completion length is non zero
 * @since 3.2
 */
private boolean isSelectionTemplate() {
    if (!(fContext instanceof DocumentTemplateContext)) {
        return false;
    }
    return ((DocumentTemplateContext) fContext).getCompletionLength() > 0;
}
/** Debug representation built from the (possibly lazily computed) display string. */
@Override
public String toString() {
return "TemplateProposal{" + "fDisplayString=" + getDisplayString() + '}';
}
/** Returns the linked mode model stored when this proposal was applied. */
@Override
public LinkedModeModel getLinkedModel() {
return linkedModeModel;
}
}
| epl-1.0 |
drbgfc/mdht | cda/plugins/org.openhealthtools.mdht.uml.hl7.datatypes/src/org/openhealthtools/mdht/uml/hl7/datatypes/operations/PQOperations.java | 1579 | /*******************************************************************************
* Copyright (c) 2009, 2011 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.openhealthtools.mdht.uml.hl7.datatypes.operations;
import java.math.BigDecimal;
import org.openhealthtools.mdht.uml.hl7.datatypes.PQ;
/**
 * A static utility class that provides operations related to
 * '<em><b>PQ</b></em>' (physical quantity) model objects.
 *
 * <p>
 * The following operations are supported:
 * <ul>
 *   <li>{@link org.openhealthtools.mdht.uml.hl7.datatypes.PQ#setValue(java.lang.Double) <em>Set Value</em>}</li>
 * </ul>
 * </p>
 */
public class PQOperations extends ANYOperations {

    /** Hidden constructor; this class only exposes static operations. */
    protected PQOperations() {
        super();
    }

    /**
     * Sets the value of the given PQ from a boxed Double, converting it to
     * the BigDecimal representation used by the model. A null argument
     * clears the value.
     *
     * @param pq the physical quantity to update
     * @param newValue the new value, or null to unset it
     */
    public static void setValue(PQ pq, Double newValue) {
        BigDecimal converted = (newValue == null)
                ? null
                : BigDecimal.valueOf(newValue.doubleValue());
        pq.setValue(converted);
    }
} // PQOperations
| epl-1.0 |
TypeFox/che | wsagent/che-core-api-testing-shared/src/main/java/org/eclipse/che/api/testing/shared/TestLaunchResult.java | 982 | /*
* Copyright (c) 2012-2017 Red Hat, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Red Hat, Inc. - initial API and implementation
*/
package org.eclipse.che.api.testing.shared;
import javax.validation.constraints.NotNull;
import org.eclipse.che.dto.shared.DTO;
/**
 * DTO describing the result of launching a test run: a success flag and,
 * when Debug Mode is on, the port for attaching a debugger.
 * (Previous javadoc "Describes test position in document" was a copy-paste
 * error - it did not match this interface.)
 */
@DTO
public interface TestLaunchResult {
/** @return {@code true} if tests were launched successfully otherwise returns false */
@NotNull
boolean isSuccess();
void setSuccess(boolean isSuccess);
TestLaunchResult withSuccess(boolean isSuccess);
/** @return port for connecting to the debugger if Debug Mode is on */
@NotNull
int getDebugPort();
void setDebugPort(int port);
TestLaunchResult withDebugPort(int port);
}
| epl-1.0 |
SmithAndr/egit | org.eclipse.egit.ui.test/src/org/eclipse/egit/ui/wizards/clone/SampleTestRepository.java | 5461 | /*******************************************************************************
* Copyright (C) 2010, Matthias Sohn <matthias.sohn@sap.com>
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*******************************************************************************/
package org.eclipse.egit.ui.wizards.clone;
import static org.junit.Assert.assertFalse;
import java.io.File;
import java.io.IOException;
import java.util.Random;
import org.eclipse.jgit.junit.TestRepository;
import org.eclipse.jgit.junit.http.SimpleHttpServer;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.lib.RepositoryBuilder;
import org.eclipse.jgit.revwalk.RevBlob;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevTag;
import org.eclipse.jgit.transport.Daemon;
import org.eclipse.jgit.transport.DaemonClient;
import org.eclipse.jgit.transport.resolver.FileResolver;
import org.eclipse.jgit.util.FileUtils;
/**
 * Creates an on disk sample repository with some generated content and starts
 * either a git daemon or a simple HTTP server on a free port.
 *
 * If the system property <code>test-repo-no-cleanup</code> is defined the
 * source repository data will not be deleted from disk to enable testing the
 * test.
 */
public class SampleTestRepository {

    /**
     * Name of the test repository
     */
    public static final String REPO_NAME = "test";

    /**
     * Name of a branch in the sample repository
     */
    public static final String FIX = "fix";

    /**
     * Name of a tag in the sample repository
     */
    public static final String v1_0_name = "v1_0";

    /**
     * Name of a tag in the sample repository
     */
    public static final String v2_0_name = "v2_0";

    /**
     * Name of a file in the sample repository
     */
    public static final String A_txt_name = "A_txt";

    /** System property suppressing cleanup of the generated test data. */
    private static final String NO_CLEANUP_PROPERTY = "test-repo-no-cleanup";

    /** Directory the temporary repository is created under. */
    private static final File trash = new File("target/trash");

    private final TestRepository<Repository> src;

    private Daemon d;

    private SimpleHttpServer httpServer;

    private String uri;

    private RevBlob A_txt;

    private RevCommit A, B, C;

    private RevTag v1_0, v2_0;

    private final boolean serveHttp;

    /** @return the URI clients should use to reach the served repository */
    public String getUri() {
        return uri;
    }

    /**
     * Create a bare repository, generate some sample data and start git daemon
     * on a free port (or an HTTP server when requested).
     *
     * @param n
     *            hint how many random commits should be generated
     * @param serveHttp
     *            serve via HTTP instead of the git protocol
     * @throws Exception
     *             on any setup failure
     */
    public SampleTestRepository(int n, boolean serveHttp) throws Exception {
        this.serveHttp = serveHttp;
        src = createRepository();
        generateSampleData(n);
        if (serveHttp)
            serveHttp();
        else
            serve();
    }

    /** Creates a new bare repository under the trash directory. */
    private TestRepository<Repository> createRepository() throws Exception {
        String gitdirName = "test" + System.currentTimeMillis()
                + Constants.DOT_GIT;
        File gitdir = new File(trash, gitdirName).getCanonicalFile();
        Repository db = new RepositoryBuilder().setGitDir(gitdir).build();
        assertFalse(gitdir.exists());
        db.create(true);
        return new TestRepository<Repository>(db);
    }

    /**
     * Populates the repository: commit A, {@code n} random filler commits,
     * commit B (tagged v1_0) on master, and commit C (tagged v2_0) on the
     * fix branch.
     *
     * @param n
     *            number of random filler commits between A and B
     */
    private void generateSampleData(int n) throws Exception {
        A_txt = src.blob("A");
        A = src.commit().add(A_txt_name, A_txt).create();
        src.update(Constants.R_HEADS + Constants.MASTER, A);
        // create some random commits
        RevCommit X = A;
        for (int i = 0; i < n; i++) {
            X = src.commit().parent(X)
                    .add(randomAsciiString(), randomAsciiString()).create();
        }
        B = src.commit().parent(X).add(A_txt_name, "C").add("B", "B").create();
        src.update(Constants.R_HEADS + Constants.MASTER, B);
        v1_0 = src.tag(v1_0_name, B);
        src.update(Constants.R_TAGS + v1_0_name, v1_0);
        C = src.commit().parent(A).add(A_txt_name, "D").add("C", "C").create();
        src.update(Constants.R_HEADS + FIX, C);
        v2_0 = src.tag(v2_0_name, C);
        src.update(Constants.R_TAGS + v2_0_name, v2_0);
    }

    /** Returns a random string of 10-29 mixed-case ASCII letters. */
    private String randomAsciiString() {
        Random rand = new Random();
        int strlen = rand.nextInt(20) + 10;
        StringBuilder randstring = new StringBuilder(strlen);
        for (int i = 0; i < strlen; i++) {
            // pick the base of either the lower- or upper-case letter range
            int base = rand.nextInt(2) == 1 ? 'a' : 'A';
            randstring.append((char) (rand.nextInt(26) + base));
        }
        return randstring.toString();
    }

    /** Exports the repository over the anonymous git protocol. */
    private void serve() throws IOException {
        d = new Daemon();
        FileResolver<DaemonClient> resolver = new FileResolver<DaemonClient>();
        resolver.exportRepository(REPO_NAME, src.getRepository());
        d.setRepositoryResolver(resolver);
        d.start();
        uri = "git://localhost:" + d.getAddress().getPort() + "/" + REPO_NAME
                + Constants.DOT_GIT_EXT;
    }

    /** Exports the repository over HTTP. */
    private void serveHttp() throws Exception {
        httpServer = new SimpleHttpServer(src.getRepository());
        httpServer.start();
        uri = httpServer.getUri().toString();
    }

    /**
     * Stop the git daemon (or HTTP server) and delete test data from disk. If
     * the system property <code>test-repo-no-cleanup</code> is defined the
     * test data will be left on disk for analysis.
     *
     * @throws Exception
     *             deletion of test repository failed
     * @throws IOException
     *             deletion of test repository failed
     */
    public void shutDown() throws Exception {
        src.getRepository().close();
        if (serveHttp)
            httpServer.stop();
        else
            d.stop();
        // Bug fix: Properties.contains() (inherited from Hashtable) checks
        // VALUES, not keys, so the original check never detected the system
        // property and the data was always deleted. getProperty() correctly
        // tests whether the property is defined.
        if (System.getProperty(NO_CLEANUP_PROPERTY) == null)
            FileUtils.delete(trash, FileUtils.RECURSIVE | FileUtils.RETRY);
    }
}
| epl-1.0 |
rleigh-dundee/openmicroscopy | components/client/test/ome/client/itests/TicketsUpTo1000Test.java | 8950 | /*
* $Id$
*
* Copyright 2006 University of Dundee. All rights reserved.
* Use is subject to license terms supplied in LICENSE.txt
*/
package ome.client.itests;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import junit.framework.TestCase;
import ome.api.IAdmin;
import ome.api.IQuery;
import ome.api.IUpdate;
import ome.api.RawFileStore;
import ome.model.IObject;
import ome.model.annotations.Annotation;
import ome.model.annotations.DatasetAnnotationLink;
import ome.model.annotations.TextAnnotation;
import ome.model.containers.Category;
import ome.model.containers.CategoryGroup;
import ome.model.containers.Dataset;
import ome.model.containers.Project;
import ome.model.core.Image;
import ome.model.core.OriginalFile;
import ome.model.enums.Format;
import ome.model.meta.Experimenter;
import ome.system.Login;
import ome.system.ServiceFactory;
import ome.util.Filter;
import ome.util.Filterable;
import ome.util.builders.PojoOptions;
import org.testng.annotations.Test;
@Test(groups = { "client", "integration" })
public class TicketsUpTo1000Test extends TestCase {
ServiceFactory sf = new ServiceFactory("ome.client.test");
IUpdate iUpdate = sf.getUpdateService();
IQuery iQuery = sf.getQueryService();
IAdmin iAdmin = sf.getAdminService();
Login rootLogin = (Login) sf.getContext().getBean("rootLogin");
// ~ Ticket 509
// =========================================================================
@Test(groups = "ticket:509")
public void test_connectionsShouldBeFreedOnClose() throws Exception {
OriginalFile of = makeFile();
for (int i = 0; i < 50; i++) {
RawFileStore rfs = sf.createRawFileStore();
rfs.setFileId(of.getId());
rfs.close();
}
}
@Test(groups = "ticket:509")
public void test_connectionsShouldBeFreedOnTimeout() throws Exception {
OriginalFile of = makeFile();
int count = 0;
for (int i = 0; i < 50; i++) {
try {
RawFileStore rfs = sf.createRawFileStore();
rfs.setFileId(of.getId());
} catch (Exception e) {
count++;
}
}
assertTrue(count + " fails!", count == 0);
}
@Test(groups = "ticket:509")
public void test_simplestPossibleFail() throws Exception {
// The TxInterceptor throws an exception which makes the id for this of
// invalid
OriginalFile of = makeFile();
// must restart the app server to prime this method.
// but probably only when it occurs in the same thread.
RawFileStore rfs = sf.createRawFileStore();
rfs.setFileId(of.getId().longValue());
rfs.close();
sf.getQueryService().get(OriginalFile.class, of.getId());
// The TxInterceptor throws an exception which makes the id for this of
// invalid
of = makeFile();
// must restart the app server to prime this method.
// but probably only when it occurs in the same thread.
rfs = sf.createRawFileStore();
rfs.setFileId(of.getId());
rfs.close();
sf.getQueryService().get(OriginalFile.class, of.getId());
}
// ~ Ticket 530
// =========================================================================
@Test(groups = "ticket:530")
public void test_hierarchyInversionShouldWork() throws Exception {
Dataset d = new Dataset();
d.setName("d");
Image i = new Image();
i.setName("i");
Category c = new Category();
c.setName("c");
d.linkImage(i);
c.linkImage(i);
i = sf.getUpdateService().saveAndReturnObject(i);
d = i.linkedDatasetList().get(0);
c = i.linkedCategoryList().get(0);
Long user_id = sf.getAdminService().getEventContext()
.getCurrentUserId();
PojoOptions po = new PojoOptions();
po.leaves();
po.exp(user_id);
Set s = sf.getPojosService().findContainerHierarchies(Project.class,
Collections.singleton(i.getId()), po.map());
s = sf.getPojosService().findContainerHierarchies(CategoryGroup.class,
Collections.singleton(i.getId()), po.map());
assertTrue(s.size() > 0);
}
// ~ Ticket 541
// =========================================================================
@Test(groups = "ticket:541")
public void test_updateMultipleThrowsOptimisticLock() throws Exception {
Image image = new Image();
Dataset dataset = new Dataset();
image.setName("ticket:541");
dataset.setName("ticket:541");
image.linkDataset(dataset);
image = iUpdate.saveAndReturnObject(image);
dataset = image.linkedDatasetList().get(0);
image.unlinkDataset(dataset);
iUpdate.saveArray(new IObject[] { image, dataset });
}
// ~ Ticket 546
// =========================================================================
@Test(groups = "ticket:546")
public void test_createDataObjectsShouldLoadAnnotations() throws Exception {
Dataset d = makeDataset();
Annotation annotation = makeAnnotation();
DatasetAnnotationLink link = d.linkAnnotation(annotation);
link = sf.getPojosService().createDataObject(link, null);
annotation = link.child();
assertNotNull(link.parent());
assertTrue(link.parent().isLoaded());
d = makeDataset();
annotation = makeAnnotation();
link = d.linkAnnotation(annotation);
link = (DatasetAnnotationLink) sf.getPojosService().createDataObjects(
new DatasetAnnotationLink[] { link }, null)[0];
assertNotNull(link.parent());
assertTrue(link.parent().isLoaded());
}
private Annotation makeAnnotation() {
TextAnnotation annotation = new TextAnnotation();
annotation.setNs("ticket:546");
annotation.setTextValue("ticket:546");
return annotation;
}
private Dataset makeDataset() {
Dataset d = new Dataset();
d.setName("ticket:546");
d = sf.getPojosService().createDataObject(d, null);
return d;
}
// ~ Ticket 555
// =========================================================================
@Test(groups = "ticket:555")
public void test_iadminAllowsUpdatingUsers() throws Exception {
ServiceFactory root = new ServiceFactory(rootLogin);
String newSysUser = updateNewUser(root);
Login systemUser = new Login(newSysUser, "", "system", "Test");
ServiceFactory sys = new ServiceFactory(systemUser);
updateNewUser(sys);
}
protected String updateNewUser(ServiceFactory services) {
String name = UUID.randomUUID().toString();
Experimenter e = new Experimenter();
e.setOmeName(name);
e.setFirstName("ticket:555");
e.setLastName("ticket:555");
long id = services.getAdminService().createSystemUser(e);
Experimenter test = services.getAdminService().lookupExperimenter(name);
String email = "ticket@555";
test.setEmail(email);
services.getAdminService().updateExperimenter(test);
test = services.getAdminService().lookupExperimenter(name);
assertEquals(email, test.getEmail());
return name;
}
// =========================================================================
@Test(groups = "ticket:778")
public void test_ProjAnnCollectionError() throws Exception {
Project p = new Project();
p.setName("ticket:778");
p = sf.getPojosService().createDataObject(p, new PojoOptions().map());
Project fromServer = sf.getQueryService().get(Project.class, p.getId());
fromServer.setDescription("desc updated");
fromServer.acceptFilter(new Filter() {
public Filterable filter(String fieldId, Filterable f) {
return f;
}
public Collection filter(String fieldId, Collection c) {
return null;
}
public Map filter(String fieldId, Map m) {
return m;
}
public Object filter(String fieldId, Object o) {
return o;
}
});
sf.getPojosService().updateDataObject(fromServer,
new PojoOptions().map());
}
// ~ Helpers
// =========================================================================
private OriginalFile makeFile() {
OriginalFile of = new OriginalFile();
of.setSha1("ticket:509");
of.setSize(0L);
of.setName("ticket:509");
of.setPath("/dev/null");
of.setFormat(new Format(1L, false));
of = sf.getUpdateService().saveAndReturnObject(of);
return of;
}
} | gpl-2.0 |
FauxFaux/jdk9-jaxws | src/java.xml.ws/share/classes/com/sun/xml/internal/ws/encoding/XmlDataContentHandler.java | 5409 | /*
* Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.xml.internal.ws.encoding;
import com.sun.xml.internal.ws.util.xml.XmlUtil;
import javax.activation.ActivationDataFlavor;
import javax.activation.DataContentHandler;
import javax.activation.DataSource;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import java.awt.datatransfer.DataFlavor;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.util.Arrays;
/**
 * JAF data handler for XML content
 *
 * @author Jitendra Kotamraju
 */
public class XmlDataContentHandler implements DataContentHandler {

    /** Flavors this handler can transfer: text/xml and application/xml. */
    private final DataFlavor[] flavors;

    public XmlDataContentHandler() throws ClassNotFoundException {
        flavors = new DataFlavor[3];
        flavors[0] = new ActivationDataFlavor(StreamSource.class, "text/xml", "XML");
        flavors[1] = new ActivationDataFlavor(StreamSource.class, "application/xml", "XML");
        flavors[2] = new ActivationDataFlavor(String.class, "text/xml", "XML String");
    }

    /** Returns a defensive copy so callers cannot mutate the internal array. */
    public DataFlavor[] getTransferDataFlavors() {
        return Arrays.copyOf(flavors, flavors.length);
    }

    /**
     * Returns the content of the data source for a supported flavor, or
     * {@code null} when the flavor is not one of ours.
     */
    public Object getTransferData(DataFlavor df, DataSource ds)
            throws IOException {
        for (DataFlavor aFlavor : flavors) {
            if (aFlavor.equals(df)) {
                return getContent(ds);
            }
        }
        return null;
    }

    /**
     * Create an object from the input stream.
     *
     * @return a {@link StreamSource} over the data source's stream, decoded
     *         with the charset declared in its content type when present
     * @throws IOException if the declared content type is not an XML type
     */
    public Object getContent(DataSource ds) throws IOException {
        String ctStr = ds.getContentType();
        String charset = null;
        if (ctStr != null) {
            ContentType ct = new ContentType(ctStr);
            if (!isXml(ct)) {
                throw new IOException(
                        "Cannot convert DataSource with content type \""
                                + ctStr + "\" to object in XmlDataContentHandler");
            }
            charset = ct.getParameter("charset");
        }
        // Bug fix: the charset must be passed to the InputStreamReader. The
        // original code passed it as the StreamSource systemId (a URI slot),
        // so the reader silently decoded with the platform default encoding.
        return (charset != null)
                ? new StreamSource(new InputStreamReader(ds.getInputStream(), charset))
                : new StreamSource(ds.getInputStream());
    }

    /**
     * Convert the object to a byte stream
     *
     * @param obj a DataSource, Source or String holding XML
     * @param mimeType the target content type; must be an XML type
     * @throws IOException on an unsupported object type, a non-XML content
     *         type, or a transformation failure
     */
    public void writeTo(Object obj, String mimeType, OutputStream os)
            throws IOException {
        if (!(obj instanceof DataSource || obj instanceof Source || obj instanceof String)) {
            throw new IOException("Invalid Object type = " + obj.getClass() +
                    ". XmlDataContentHandler can only convert DataSource|Source|String to XML.");
        }
        ContentType ct = new ContentType(mimeType);
        if (!isXml(ct)) {
            throw new IOException(
                    "Invalid content type \"" + mimeType + "\" for XmlDataContentHandler");
        }
        String charset = ct.getParameter("charset");
        if (obj instanceof String) {
            String s = (String) obj;
            if (charset == null) {
                charset = "utf-8";
            }
            OutputStreamWriter osw = new OutputStreamWriter(os, charset);
            osw.write(s, 0, s.length());
            osw.flush();
            return;
        }
        Source source = (obj instanceof DataSource)
                ? (Source) getContent((DataSource) obj) : (Source) obj;
        try {
            Transformer transformer = XmlUtil.newTransformer();
            if (charset != null) {
                transformer.setOutputProperty(OutputKeys.ENCODING, charset);
            }
            StreamResult result = new StreamResult(os);
            transformer.transform(source, result);
        } catch (Exception ex) {
            // Preserve the original exception as the cause instead of
            // flattening it to its message string.
            throw new IOException(
                    "Unable to run the JAXP transformer in XmlDataContentHandler "
                            + ex.getMessage(), ex);
        }
    }

    /** Returns true for text/xml and application/xml content types. */
    private boolean isXml(ContentType ct) {
        return ct.getSubType().equals("xml") &&
                (ct.getPrimaryType().equals("text") || ct.getPrimaryType().equals("application"));
    }
}
| gpl-2.0 |
alanbuttars/openqollo | client/platforms/android/gen/com/alanbuttars/qollo/Manifest.java | 191 | /*___Generated_by_IDEA___*/
package com.alanbuttars.qollo;
/* This stub is only used by the IDE. It is NOT the Manifest class actually packed into the APK */
// Generated placeholder (see the Generated_by_IDEA header above); intentionally empty.
public final class Manifest {
} | gpl-2.0 |
AntumDeluge/arianne-stendhal | src/games/stendhal/client/actions/BareBonesBrowserLaunchCommand.java | 2263 | /* $Id$ */
/***************************************************************************
* (C) Copyright 2003-2010 - Stendhal *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package games.stendhal.client.actions;
import games.stendhal.client.ClientSingletonRepository;
import games.stendhal.client.gui.BareBonesBrowserLaunch;
import games.stendhal.client.gui.chatlog.HeaderLessEventLine;
import games.stendhal.common.NotificationType;
/**
 * Generalized super class providing a uniform way to open URLs in the
 * browser.
 *
 * @author madmetzger
 */
class BareBonesBrowserLaunchCommand implements SlashAction {

    /** The URL this command opens when executed. */
    private final String url;

    /**
     * creates a new BareBonesBrowserLaunchCommand
     *
     * @param url url to open
     */
    BareBonesBrowserLaunchCommand(String url) {
        this.url = url;
    }

    /**
     * Opens an URL with the browser
     *
     * @param params ignored
     * @param remainder ignored
     * @return <code>true</code>
     */
    @Override
    public boolean execute(final String[] params, final String remainder) {
        final String message = "Trying to open #" + url + " in your browser.";
        ClientSingletonRepository.getUserInterface().addEventLine(
                new HeaderLessEventLine(message, NotificationType.CLIENT));
        BareBonesBrowserLaunch.openURL(url);
        return true;
    }

    /**
     * Get the maximum number of formal parameters.
     *
     * @return The parameter count.
     */
    @Override
    public int getMaximumParameters() {
        return 0;
    }

    /**
     * Get the minimum number of formal parameters.
     *
     * @return The parameter count.
     */
    @Override
    public int getMinimumParameters() {
        return 0;
    }
}
| gpl-2.0 |
scotthew/RomRaider | src/main/java/com/romraider/io/j2534/api/TestJ2534IsoTp.java | 15113 | /*
* RomRaider Open-Source Tuning, Logging and Reflashing
* Copyright (C) 2006-2013 RomRaider.com
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package com.romraider.io.j2534.api;
import static com.romraider.util.ByteUtil.asUnsignedInt;
import static com.romraider.util.HexUtil.asHex;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import com.romraider.io.j2534.api.J2534Impl.Config;
import com.romraider.io.j2534.api.J2534Impl.Protocol;
import com.romraider.io.j2534.api.J2534Impl.TxFlags;
import com.romraider.logger.ecu.exception.InvalidResponseException;
import com.romraider.util.HexUtil;
import com.romraider.util.LogManager;
/**
* This class is used to exercise the J2534 API against a real J2534 device and
* an active ECU using the ISO15765-2 protocol.
*/
public final class TestJ2534IsoTp {
    /** J2534 API implementation; assigned in main() before construction. */
    private static J2534 api ;
    /** Loopback disabled (0) so we do not receive echoes of our own frames. */
    private static final int LOOPBACK = 0;
    // Filter mask: compare all four CAN ID bytes.
    private static final byte[] mask1 = {
        (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff};
    // CAN ID 0x7E8 — presumably the ECU's response address; validateResponse()
    // checks incoming frames against this. TODO confirm against ISO 15765-4.
    private static final byte[] match1 = {
        (byte) 0x00, (byte) 0x00, (byte) 0x07, (byte) 0xe8};
    // CAN ID 0x7E0 — flow-control/request address used as the message source
    // when building requests (see buildRequest() calls).
    private static final byte[] fc1 = {
        (byte) 0x00, (byte) 0x00, (byte) 0x07, (byte) 0xe0};
    /** Negative Response Code marker byte in an ECU reply. */
    private static final byte ECU_NRC = (byte) 0x7F;
    /** Mode 9 (vehicle information) positive response id (0x09 + 0x40). */
    private static final byte READ_MODE9_RESPONSE = (byte) 0x49;
    private static final byte READ_MODE9_COMMAND = (byte) 0x09;
    // Mode 9 PIDs queried: 0x02 = VIN, 0x04 = CAL ID, 0x06 = CVN
    // (per the decoding in handleResponse()).
    private static final byte[] READ_MODE9_PIDS = {
        (byte) 0x02, (byte) 0x04, (byte) 0x06};
    /** Mode 3 (stored DTCs) positive response id (0x03 + 0x40). */
    private static final byte READ_MODE3_RESPONSE = (byte) 0x43;
    private static final byte READ_MODE3_COMMAND = (byte) 0x03;
    // NOTE(review): ECU_INIT_COMMAND is unused in this class; the response id
    // is accepted by validateResponse() only.
    private static final byte ECU_INIT_COMMAND = (byte) 0x01;
    private static final byte ECU_INIT_RESPONSE = (byte) 0x41;
    /** Collects human-readable results; emptied at the start of each run. */
    private static final StringBuilder sb = new StringBuilder();
    /**
     * Opens the J2534 device, connects an ISO15765 channel, queries mode 9
     * vehicle information and mode 3 DTCs, then tears everything down.
     * Results are accumulated in {@link #sb}; read them via {@link #toString()}.
     *
     * @throws InterruptedException declared for API compatibility
     */
    public TestJ2534IsoTp() throws InterruptedException {
        final int deviceId = api.open();
        sb.delete(0, sb.capacity());
        try {
            version(deviceId);
            // 500 kbit/s CAN, no connect flags.
            final int channelId = api.connect(deviceId, 0, 500000);
            final double vBatt = api.getVbattery(deviceId);
            // System.out.println("Pin 16: " + vBatt + " VDC");
            sb.append(String.format(
                    "J2534 Interface Pin 16: %sVDC%n", vBatt));
            // The mask2/match2/fc2 and mask3/match3/fc3 triples below are only
            // used by the commented-out alternate filters; retained for manual
            // experimentation.
            final byte[] mask2 = {
                (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff};
            final byte[] match2 = {
                (byte) 0x00, (byte) 0x00, (byte) 0x07, (byte) 0xdf};
            final byte[] fc2 = {
                (byte) 0x00, (byte) 0x00, (byte) 0x07, (byte) 0xdf}; //Tester
            final byte[] mask3 = {
                (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff};
            final byte[] match3 = {
                (byte) 0x00, (byte) 0x00, (byte) 0x07, (byte) 0xe0};
            final byte[] fc3 = {
                (byte) 0x00, (byte) 0x00, (byte) 0x07, (byte) 0xe0};
            final int msgId = api.startFlowCntrlFilter(
                    channelId, mask1, match1, fc1, TxFlags.ISO15765_FRAME_PAD);
            // final int msgId1 = api.startFlowCntrlFilter(
            //         channelId, mask2, match2, fc2, TxFlags.ISO15765_FRAME_PAD);
            // final int msgId2 = api.startFlowCntrlFilter(
            //         channelId, mask3, match3, fc3, TxFlags.ISO15765_FRAME_PAD);
            try {
                setConfig(channelId);
                getConfig(channelId);
                sb.append(String.format("%n--- Vehicle Information ---%n"));
                // Query each mode 9 PID (VIN, CAL ID, CVN) in turn.
                for (byte pid : READ_MODE9_PIDS) {
                    final byte[] mode9 = buildRequest(
                            READ_MODE9_COMMAND, // mode
                            pid, // pid
                            true, // pid valid
                            fc1); // source
                    api.writeMsg(channelId,
                            mode9,
                            1000L,
                            TxFlags.ISO15765_FRAME_PAD);
                    final byte[] response;
                    response = api.readMsg(channelId, 1, 1000L);
                    // System.out.println("Response = " +
                    //         HexUtil.asHex(response));
                    handleResponse(response);
                }
                // Mode 3 (stored DTCs) takes no PID byte.
                final byte[] mode3 = buildRequest(
                        READ_MODE3_COMMAND, // mode
                        (byte) 0x00, // pid
                        false, // pid valid
                        fc1); // source
                api.writeMsg(channelId,
                        mode3,
                        1000L,
                        TxFlags.ISO15765_FRAME_PAD);
                final byte[] response;
                response = api.readMsg(channelId, 1, 1000L);
                // System.out.println("Response = " +
                //         HexUtil.asHex(response));
                handleResponse(response);
            }
            catch (Exception e) {
                // Best-effort reporting: the exception text becomes part of
                // the result string rather than aborting teardown.
                // System.out.println(e);
                sb.append(e);
            }
            finally {
                api.stopMsgFilter(channelId, msgId);
                // api.stopMsgFilter(channelId, msgId1);
                // api.stopMsgFilter(channelId, msgId2);
                api.disconnect(channelId);
            }
        } finally {
            api.close(deviceId);
        }
    }
    // NOTE(review): overrides Object.toString but lacks @Override — harmless,
    // though the annotation would be worth adding.
    public final String toString() {
        return sb.toString();
    }
    /**
     * Decodes a validated ECU response and appends human-readable text to
     * {@link #sb}. Mode 9 payloads start at byte 7 (after CAN id, response id,
     * PID and count); mode 3 DTC pairs start at byte 6.
     *
     * @param response raw frame as returned by readMsg
     */
    private final static void handleResponse(byte[] response) {
        validateResponse(response);
        final int responseLen = response.length;
        if (response[4] == READ_MODE9_RESPONSE) {
            final byte[] data = new byte[responseLen - 7];
            System.arraycopy(response, 7, data, 0, data.length);
            if (response[5] == 0x02) {
                // NOTE(review): new String(byte[]) uses the platform charset;
                // VIN bytes are presumably ASCII — confirm before changing.
                // System.out.println("VIN: " + new String(data));
                sb.append(String.format(
                        "VIN: %s%n", new String(data)));
            }
            if (response[5] == 0x04) {
                int i;
                // Trim at the first NUL terminator.
                for (i = 0; i < data.length && data[i] != 0; i++) { }
                final String str = new String(data, 0, i);
                // System.out.println("CAL ID: " + str);
                sb.append(String.format(
                        "CAL ID: %s%n", str));
            }
            if (response[5] == 0x06) {
                // System.out.println("CVN: " + HexUtil.asHex(data));
                sb.append(String.format(
                        "CVN: %s%n", HexUtil.asHex(data)));
            }
            if (response[5] == 0x08) {
                // System.out.println("PID_8: " + HexUtil.asHex(data));
            }
            if (response[5] == 0x0A) {
                int i;
                for (i = 0; i < data.length && data[i] != 0; i++) { }
                final String str = new String(data, 0, i);
                // System.out.println("Module: " + str);
                sb.append(String.format(
                        "Module: %s%n", str));
            }
        }
        if (response[4] == READ_MODE3_RESPONSE) {
            // response[5] is the DTC count; decode only if any are stored.
            if (response[5] > 0x00) {
                final byte[] data = new byte[responseLen - 6];
                System.arraycopy(response, 6, data, 0, data.length);
                int i;
                int j = 1;
                final byte[] codeHex = new byte[2];
                // Each DTC is two bytes: top two bits select the subsystem
                // letter, the remaining nibbles are the four code digits.
                for (i = 0; i < data.length; i = i + 2) {
                    System.arraycopy(data, i, codeHex, 0, 2);
                    final byte module = (byte) ((codeHex[0] & 0xC0) >> 6);
                    String moduleTxt = null;
                    switch (module) {
                        case 0:
                            moduleTxt = "P";
                            break;
                        case 1:
                            moduleTxt = "C";
                            break;
                        case 2:
                            moduleTxt = "B";
                            break;
                        case 3:
                            moduleTxt = "U";
                            break;
                    }
                    final byte dtcB1 = (byte) ((codeHex[0] & 0x30) >> 4);
                    final byte dtcB2 = (byte) (codeHex[0] & 0x0F);
                    final byte dtcB3 = (byte) ((codeHex[1] & 0xF0) >> 4);
                    final byte dtcB4 = (byte) (codeHex[1] & 0x0F);
                    // System.out.print(
                    //         String.format("DTC %d: %s%s%s%s%s%n",
                    //         j,
                    //         moduleTxt,
                    //         Character.forDigit(dtcB1, 16),
                    //         Character.forDigit(dtcB2, 16),
                    //         Character.forDigit(dtcB3, 16),
                    //         Character.forDigit(dtcB4, 16)));
                    sb.append(String.format(
                            "DTC %d: %s%s%s%s%s%n",
                            j,
                            moduleTxt,
                            Character.forDigit(dtcB1, 16),
                            Character.forDigit(dtcB2, 16),
                            Character.forDigit(dtcB3, 16),
                            Character.forDigit(dtcB4, 16)));
                    j++;
                }
            }
        }
    }
    /** Reads the device/DLL/API versions and appends them to {@link #sb}. */
    private final static void version(final int deviceId) {
        final Version version = api.readVersion(deviceId);
        // System.out.printf("Version => Firmware:[%s], DLL:[%s], API:[%s]%n",
        //         version.firmware, version.dll, version.api);
        sb.append(String.format(
                "J2534 Firmware:[%s], DLL:[%s], API:[%s]%n",
                version.firmware, version.dll, version.api));
    }
    /** Applies the ISO15765 channel configuration used by this test. */
    private final static void setConfig(int channelId) {
        final ConfigItem loopback = new ConfigItem(Config.LOOPBACK.getValue(), LOOPBACK);
        final ConfigItem bs = new ConfigItem(Config.ISO15765_BS.getValue(), 0);
        final ConfigItem stMin = new ConfigItem(Config.ISO15765_STMIN.getValue(), 0);
        final ConfigItem bs_tx = new ConfigItem(Config.BS_TX.getValue(), 0xffff);
        final ConfigItem st_tx = new ConfigItem(Config.STMIN_TX.getValue(), 0xffff);
        final ConfigItem wMax = new ConfigItem(Config.ISO15765_WFT_MAX.getValue(), 0);
        api.setConfig(channelId, loopback, bs, stMin, bs_tx, st_tx, wMax);
    }
    /** Reads back the channel configuration and appends it to {@link #sb}. */
    private final static void getConfig(int channelId) {
        final ConfigItem[] configs = api.getConfig(
                channelId,
                Config.LOOPBACK.getValue(),
                Config.ISO15765_BS.getValue(),
                Config.ISO15765_STMIN.getValue(),
                Config.BS_TX.getValue(),
                Config.STMIN_TX.getValue(),
                Config.ISO15765_WFT_MAX.getValue()
                );
        int i = 1;
        for (ConfigItem item : configs) {
            // System.out.printf("J2534 Config item %d: Parameter: %s, value:%d%n",
            //         i, Config.get(item.parameter), item.value);
            sb.append(String.format(
                    "J2534 Config item %d: Parameter: %s, value:%d%n",
                    i, Config.get(item.parameter), item.value));
            i++;
        }
    }
    /**
     * Validates a response frame: source id must match {@link #match1}, a
     * 7-byte frame is checked for a negative response code, and the response
     * id must be one of the supported positive responses.
     *
     * @param response raw frame to validate
     */
    private final static void validateResponse(byte[] response) {
        assertEquals(match1, response, "Invalid ECU id");
        if (response.length == 7) {
            assertNrc(ECU_NRC, response[4], response[5], response[6],"Request type not supported");
        }
        assertOneOf(new byte[]{ECU_INIT_RESPONSE, READ_MODE3_RESPONSE, READ_MODE9_RESPONSE}, response[4], "Invalid response code");
    }
    /**
     * Throws {@link InvalidResponseException} when the byte at the response-id
     * position equals the NRC marker, decorating the message with the echoed
     * command and the NRC reason.
     */
    private final static void assertNrc(byte expected, byte actual, byte command, byte code, String msg) {
        if (actual == expected) {
            String ec = " unsupported.";
            if (code == 0x13) {
                ec = " invalid format or length.";
            }
            throw new InvalidResponseException(
                    msg + ". Command: " + asHex(new byte[]{command}) + ec);
        }
    }
    /**
     * Compares the first four bytes (CAN id) of the actual frame against the
     * expected id and throws {@link InvalidResponseException} on mismatch.
     */
    private final static void assertEquals(byte[] expected, byte[] actual, String msg) {
        final byte[] idBytes = new byte[4];
        System.arraycopy(actual, 0, idBytes, 0, 4);
        final int idExpected = asUnsignedInt(expected);
        final int idActual = asUnsignedInt(idBytes);
        if (idActual != idExpected) {
            throw new InvalidResponseException(
                    msg + ". Expected: " + asHex(expected) +
                    ". Actual: " + asHex(idBytes) + ".");
        }
    }
    /**
     * Throws {@link InvalidResponseException} unless {@code actual} is one of
     * {@code validOptions}; the message lists all accepted values.
     */
    private final static void assertOneOf(byte[] validOptions, byte actual, String msg) {
        for (byte option : validOptions) {
            if (option == actual) {
                return;
            }
        }
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < validOptions.length; i++) {
            if (i > 0) {
                builder.append(", ");
            }
            builder.append(asHex(new byte[]{validOptions[i]}));
        }
        throw new InvalidResponseException(msg + ". Expected one of [" + builder.toString() + "]. Actual: " + asHex(new byte[]{actual}) + ".");
    }
    /**
     * Builds a request frame: the {@code content} arrays (normally the source
     * CAN id) followed by the mode byte and, when {@code pidValid}, the PID.
     *
     * @param mode service mode byte
     * @param pid PID byte, only written when pidValid is true
     * @param pidValid whether the mode takes a PID
     * @param content leading payload (source address bytes)
     * @return assembled request bytes
     */
    private final static byte[] buildRequest(
            byte mode,
            byte pid,
            boolean pidValid,
            byte[]... content) {
        final ByteArrayOutputStream bb = new ByteArrayOutputStream(6);
        try {
            for (byte[] tmp : content) {
                bb.write(tmp);
            }
            bb.write(mode);
            if (pidValid) {
                bb.write(pid);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return bb.toByteArray();
    }
    /**
     * Entry point. Optional first argument names the J2534 vendor library;
     * defaults to "op20pt32".
     */
    public final static void main(String args[]) throws InterruptedException{
        LogManager.initDebugLogging();
        if (args.length == 0) { //op20pt32 MONGI432
            api = new J2534Impl(
                    Protocol.ISO15765, "op20pt32");
        }
        else {
            api = new J2534Impl(
                    Protocol.ISO15765, args[0]);
        }
        final TestJ2534IsoTp test1 = new TestJ2534IsoTp();
        System.out.print(test1.toString());
    }
}
| gpl-2.0 |
juanma2268/jumbertoTeia2600 | jumbertoNetbeans7.2/src/sphinx4/edu/cmu/sphinx/linguist/language/ngram/InterpolatedLanguageModel.java | 6051 | /*
* Created on Jan 21, 2005
*/
package edu.cmu.sphinx.linguist.language.ngram;
import edu.cmu.sphinx.linguist.WordSequence;
import edu.cmu.sphinx.util.LogMath;
import edu.cmu.sphinx.util.props.*;
import java.io.IOException;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Simple interpolated LM implementation.
*
* @author Tanel Alumae
*/
public class InterpolatedLanguageModel implements LanguageModel {
    /**
     * The property that defines the logMath component.
     */
    @S4Component(type = LogMath.class)
    public final static String PROP_LOG_MATH = "logMath";
    /**
     * The property that defines the language models to be interpolated.
     */
    @S4ComponentList(type = LanguageModel.class)
    public final static String PROP_LANGUAGE_MODELS = "languageModels";
    /**
     * The property that defines the language models weights
     */
    @S4StringList
    public final static String PROP_LANGUAGE_MODEL_WEIGHTS = "languageModelWeights";
    private LogMath logMath;
    private boolean allocated = false;
    private List<LanguageModel> languageModels;
    /** Per-model interpolation weights, stored in the log domain. */
    private float weights[];
    private int numberOfLanguageModels;
    /** Union of the vocabularies of all interpolated models; built on allocate(). */
    private Set<String> vocabulary;
    /** Tolerance used when checking that linear weights sum to 1.0. */
    private static final double EPSILON = 0.001;
    /**
     * Programmatic constructor.
     *
     * @param logMath log-math component used for conversions and log-domain addition
     * @param languageModels models to interpolate
     * @param floats linear-domain weights, one per model, summing to 1.0
     * @throws PropertyException if the weights do not sum to 1.0 (within EPSILON)
     */
    public InterpolatedLanguageModel(LogMath logMath, List<LanguageModel> languageModels, float [] floats ) {
        // BUGFIX: the field was never assigned here, so getProbability()
        // dereferenced a null logMath for programmatically built instances.
        this.logMath = logMath;
        this.languageModels = languageModels;
        this.numberOfLanguageModels = languageModels.size();
        this.weights = new float[floats.length];
        float weightSum = 0;
        for (int i = 0; i < floats.length; i++) {
            weightSum += floats[i];
            this.weights[i] = logMath.linearToLog(floats[i]);
        }
        checkWeightSum(weightSum);
    }
    public InterpolatedLanguageModel() {
    }
    /**
     * Configures this model from a property sheet.
     *
     * @throws PropertyException if weights are malformed or do not sum to 1.0
     * @throws RuntimeException if called after allocation, or if the number of
     *         weights differs from the number of models
     */
    @Override
    public void newProperties(PropertySheet ps) throws PropertyException {
        if (allocated) {
            throw new RuntimeException("Can't change properties after allocation");
        }
        logMath = (LogMath) ps.getComponent(PROP_LOG_MATH);
        languageModels = ps.getComponentList(PROP_LANGUAGE_MODELS, LanguageModel.class);
        numberOfLanguageModels = languageModels.size();
        // read weights as a String List.
        List<String> items = ps.getStringList(PROP_LANGUAGE_MODEL_WEIGHTS);
        if (items.size() != numberOfLanguageModels) {
            throw new RuntimeException("Number of weights not equal to number of language models");
        }
        // convert Strings to floats and assign weights.
        float[] floats = new float[items.size()];
        weights = new float[floats.length];
        float weightSum = 0;
        for (int i = 0; i < items.size(); i++) {
            try {
                floats[i] = Float.parseFloat(items.get(i));
                weightSum += floats[i];
                weights[i] = logMath.linearToLog(floats[i]);
            } catch (NumberFormatException e) {
                throw new PropertyException(
                        InterpolatedLanguageModel.class.getName(),
                        PROP_LANGUAGE_MODEL_WEIGHTS,
                        "Float value expected from the property list. But found:" + items.get(i));
            }
        }
        checkWeightSum(weightSum);
    }
    /**
     * Validates that the linear weights sum to 1.0 within EPSILON.
     * Shared by both configuration paths.
     *
     * @param weightSum sum of the linear-domain weights
     * @throws PropertyException if the sum is outside [1 - EPSILON, 1 + EPSILON]
     */
    private static void checkWeightSum(float weightSum) {
        if (weightSum < 1.0 - EPSILON || weightSum > 1.0 + EPSILON) {
            throw new PropertyException(
                    InterpolatedLanguageModel.class.getName(),
                    PROP_LANGUAGE_MODEL_WEIGHTS,
                    "Weights do not sum to 1.0");
        }
    }
    /**
     * Allocates all wrapped models and builds the combined vocabulary.
     * Idempotent: subsequent calls are no-ops until deallocate().
     */
    @Override
    public void allocate() throws IOException {
        if (!allocated) {
            allocated = true;
            vocabulary = new HashSet<String>();
            for (LanguageModel model : languageModels) {
                model.allocate();
                vocabulary.addAll(model.getVocabulary());
            }
        }
    }
    /** Deallocates all wrapped models. */
    @Override
    public void deallocate() throws IOException {
        allocated = false;
        for (LanguageModel model : languageModels) {
            model.deallocate();
        }
    }
    /**
     * Called before a recognition
     */
    @Override
    public void start() {
    }
    /**
     * Called after a recognition
     */
    @Override
    public void stop() {
    }
    /**
     * Calculates probability p = w[1]*p[1] + w[2]*p[2] + ... (in log domain)
     *
     * @see edu.cmu.sphinx.linguist.language.ngram.LanguageModel#getProbability(edu.cmu.sphinx.linguist.WordSequence)
     */
    @Override
    public float getProbability(WordSequence wordSequence) {
        float prob = 0;
        for (int i = 0; i < numberOfLanguageModels; i++) {
            // log-domain: multiplication becomes addition.
            float p = weights[i] + (languageModels.get(i)).getProbability(wordSequence);
            if (i == 0) {
                prob = p;
            } else {
                // log-domain addition of the linear probabilities.
                prob = logMath.addAsLinear(prob, p);
            }
        }
        return prob;
    }
    /* (non-Javadoc)
     * @see edu.cmu.sphinx.linguist.language.ngram.LanguageModel#getSmear(edu.cmu.sphinx.linguist.WordSequence)
     */
    @Override
    public float getSmear(WordSequence wordSequence) {
        return 1.0f; // TODO not implemented
    }
    /* (non-Javadoc)
     * @see edu.cmu.sphinx.linguist.language.ngram.LanguageModel#getVocabulary()
     */
    @Override
    public Set<String> getVocabulary() {
        return vocabulary;
    }
    /**
     * Returns the largest max-depth among the interpolated models.
     *
     * @see edu.cmu.sphinx.linguist.language.ngram.LanguageModel#getMaxDepth()
     */
    @Override
    public int getMaxDepth() {
        int maxDepth = 0;
        for (LanguageModel languageModel : languageModels) {
            int d = languageModel.getMaxDepth();
            if (d > maxDepth) {
                maxDepth = d;
            }
        }
        return maxDepth;
    }
}
| gpl-2.0 |
rupenp/CoreNLP | src/edu/stanford/nlp/util/DeltaCollectionValuedMap.java | 6469 | package edu.stanford.nlp.util;
import java.util.AbstractSet;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.function.Predicate;
/**
* Implementation of CollectionValuedMap that appears to store an "original"
* map and changes to that map. No one currently uses it. See {@link DeltaMap}.
*
* @author Teg Grenager (grenager@cs.stanford.edu)
* @version Jan 14, 2004
*/
public class DeltaCollectionValuedMap<K, V> extends CollectionValuedMap<K, V> {
    private static final long serialVersionUID = 1L;
    /** The wrapped "original" map; never mutated by this class. */
    private final CollectionValuedMap<K, V> originalMap;
    /** Overlay of changes; a mapping to {@link #removedValue} marks deletion. */
    @SuppressWarnings("serial")
    private final Map<K, Collection<V>> deltaMap;
    /** Sentinel stored in deltaMap to mark a key as removed. */
    private static final Object removedValue = new Object();
    /** Minimal Map.Entry implementation used by views. */
    static class SimpleEntry<K, V> implements Map.Entry<K, V> {
        K key;
        V value;
        public SimpleEntry(K key, V value) {
            this.key = key;
            this.value = value;
        }
        public SimpleEntry(Map.Entry<K, V> e) {
            this.key = e.getKey();
            this.value = e.getValue();
        }
        @Override
        public K getKey() {
            return key;
        }
        @Override
        public V getValue() {
            return value;
        }
        @Override
        public V setValue(V value) {
            V oldValue = this.value;
            this.value = value;
            return oldValue;
        }
        @Override
        public boolean equals(Object o) {
            if (!(o instanceof Map.Entry)) {
                return false;
            }
            // Use a wildcard entry instead of a raw type; equality only needs
            // key/value comparison, so no unchecked cast is required.
            Map.Entry<?, ?> e = (Map.Entry<?, ?>) o;
            return eq(key, e.getKey()) && eq(value, e.getValue());
        }
        @Override
        public int hashCode() {
            return ((key == null) ? 0 : key.hashCode()) ^ ((value == null) ? 0 : value.hashCode());
        }
        @Override
        public String toString() {
            return key + "=" + value;
        }
        private static boolean eq(Object o1, Object o2) {
            return (o1 == null ? o2 == null : o1.equals(o2));
        }
    }
    /**
     * Looks up a key, consulting the delta first and falling back to the
     * original map. A key marked removed yields an empty collection.
     */
    @Override
    public Collection<V> get(Object key) {
        // key could be not in original or in deltaMap
        // key could be not in original but in deltaMap
        // key could be in original but removed from deltaMap
        // key could be in original but mapped to something else in deltaMap
        Collection<V> deltaResult = deltaMap.get(key);
        if (deltaResult == null) {
            return originalMap.get(key);
        }
        if (deltaResult == removedValue) {
            return cf.newEmptyCollection();
        }
        return deltaResult;
    }
    // Modification Operations
    @Override
    public Collection<V> put(K key, Collection<V> value) {
        throw new UnsupportedOperationException();
    }
    @Override
    public void putAll(Map<? extends K, ? extends Collection<V>> m) {
        throw new UnsupportedOperationException();
    }
    /**
     * Adds value to the collection for key. The original map's collection is
     * copied into the delta on first write (copy-on-write per key).
     */
    @Override
    public void add(K key, V value) {
        Collection<V> deltaC = deltaMap.get(key);
        if (deltaC == null) {
            deltaC = cf.newCollection();
            Collection<V> originalC = originalMap.get(key);
            if (originalC != null) {
                deltaC.addAll(originalC);
            }
            deltaMap.put(key, deltaC);
        }
        deltaC.add(value);
    }
    /**
     * Adds all of the mappings in m to this CollectionValuedMap.
     * If m is a CollectionValuedMap, it will behave strangely. Use the constructor instead.
     *
     */
    @Override
    public void addAll(Map<K, V> m) {
        for (Map.Entry<K, V> e : m.entrySet()) {
            add(e.getKey(), e.getValue());
        }
    }
    /**
     * Removes the key by recording the removal sentinel in the delta.
     *
     * @return the collection previously associated with the key
     */
    @Override
    public Collection<V> remove(Object key) {
        Collection<V> result = get(key);
        deltaMap.put(ErasureUtils.uncheckedCast(key), ErasureUtils.uncheckedCast(removedValue));
        return result;
    }
    /** Removes a single value from the collection for key, copy-on-write. */
    @Override
    public void removeMapping(K key, V value) {
        Collection<V> deltaC = deltaMap.get(key);
        if (deltaC == null) {
            Collection<V> originalC = originalMap.get(key);
            if (originalC != null && originalC.contains(value)) {
                deltaC = cf.newCollection();
                deltaC.addAll(originalC);
                deltaMap.put(key, deltaC);
            }
        }
        if (deltaC != null) {
            deltaC.remove(value);
        }
    }
    @SuppressWarnings("SuspiciousMethodCalls")
    @Override
    public boolean containsKey(Object key) {
        // key could be not in original or in deltaMap
        // key could be not in original but in deltaMap
        // key could be in original but removed from deltaMap
        // key could be in original but mapped to something else in deltaMap
        Object value = deltaMap.get(key);
        if (value == null) {
            return originalMap.containsKey(key);
        }
        return value != removedValue;
    }
    @Override
    public boolean containsValue(Object value) {
        throw new UnsupportedOperationException();
    }
    // Bulk Operations
    /**
     * This is more expensive than normal.
     */
    @Override
    public void clear() {
        // mark all keys of the original map as removed in the delta
        for (K key : originalMap.keySet()) {
            deltaMap.put(key, ErasureUtils.uncheckedCast(removedValue));
        }
        // BUGFIX: keys that were only ever added to the delta (absent from the
        // original map) previously survived clear(); mark every delta entry
        // removed as well. setValue avoids structural modification while iterating.
        for (Map.Entry<K, Collection<V>> entry : deltaMap.entrySet()) {
            entry.setValue(ErasureUtils.uncheckedCast(removedValue));
        }
    }
    @Override
    public boolean isEmpty() {
        return size() == 0;
    }
    @Override
    public int size() {
        return entrySet().size();
    }
    @Override
    public Collection<Collection<V>> values() {
        throw new UnsupportedOperationException();
    }
    // Views
    /**
     * This is cheap.
     *
     * @return A set view of the mappings contained in this map.
     */
    @Override
    public Set<Entry<K, Collection<V>>> entrySet() {
        return new AbstractSet<Entry<K, Collection<V>>>() {
            @Override
            public Iterator<Map.Entry<K, Collection<V>>> iterator() {
                // original entries not shadowed by the delta ...
                Predicate<Entry<K, Collection<V>>> filter1 = e -> ! deltaMap.containsKey(e.getKey());
                Iterator<Map.Entry<K, Collection<V>>> iter1 = new FilteredIterator<>(originalMap.entrySet().iterator(), filter1);
                // ... followed by delta entries that are not removal markers.
                Predicate<Entry<K, Collection<V>>> filter2 = e -> e.getValue() != removedValue;
                Iterator<Map.Entry<K, Collection<V>>> iter2 = new FilteredIterator<>(deltaMap.entrySet().iterator(), filter2);
                return new ConcatenationIterator<>(iter1, iter2);
            }
            @Override
            public int size() {
                int size = 0;
                for (@SuppressWarnings("unused") Entry<K, Collection<V>> ignored : this) {
                    size++;
                }
                return size;
            }
        };
    }
    /**
     * Wraps originalMap; all subsequent mutations are recorded in a private
     * delta map, leaving the original untouched.
     */
    public DeltaCollectionValuedMap(CollectionValuedMap<K, V> originalMap) {
        super(originalMap.mf, originalMap.cf, originalMap.treatCollectionsAsImmutable);
        this.originalMap = originalMap;
        this.deltaMap = mf.newMap();
    }
}
| gpl-2.0 |
formeppe/NewAge-JGrass | oms3/src/main/java/oms3/Notification.java | 9796 | /*
* $Id$
*
* This software is provided 'as-is', without any express or implied
* warranty. In no event will the authors be held liable for any damages
* arising from the use of this software.
*
* Permission is granted to anyone to use this software for any purpose,
* including commercial applications, and to alter it and redistribute it
* freely, subject to the following restrictions:
*
* 1. The origin of this software must not be misrepresented; you must not
* claim that you wrote the original software. If you use this software
* in a product, an acknowledgment in the product documentation would be
* appreciated but is not required.
*
* 2. Altered source versions must be plainly marked as such, and must not be
* misrepresented as being the original software.
*
* 3. This notice may not be removed or altered from any source
* distribution.
*/
package oms3;
import java.util.EventListener;
import java.util.EventObject;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.event.EventListenerList;
/** Event Notification class. This class handles
* Allows the
*
* @author Olaf David (olaf.david@ars.usda.gov)
* @version $Id$
*/
public class Notification {
    /** Registered listeners; EventListenerList is safe for this add/fire pattern. */
    EventListenerList ll = new EventListenerList();
    Controller c;
    /* avoid Event object creation and fire(..) calls if no listeners. */
    boolean shouldFire = false;
    protected static final Logger log = Logger.getLogger("oms3.sim");
    Notification(Controller c) {
        this.c = c;
    }
    final boolean shouldFire() {
        return shouldFire;
    }
    Controller getController() {
        return c;
    }
    void addListener(Listener l) {
        if (log.isLoggable(Level.CONFIG)) {
            log.config("Adding Notification Listener " + l);
        }
        ll.add(Listener.class, l);
        shouldFire = true;
    }
    void removeListener(Listener l) {
        if (log.isLoggable(Level.CONFIG)) {
            log.config("Removing Notification Listener " + l);
        }
        ll.remove(Listener.class, l);
        shouldFire = ll.getListenerCount() > 0;
    }
    /** Dispatches the event to all registered listeners (last added first). */
    private void fire(Type t, EventObject E) {
        Object[] listeners = ll.getListenerList();
        for (int i = listeners.length - 2; i >= 0; i -= 2) {
            ((Listener) listeners[i + 1]).notice(t, E);
        }
    }
    void fireWait(ComponentAccess w) {
        if (shouldFire) {
            fire(Type.WAITING, new ComponentEvent(c, w.getComponent()));
        }
    }
    void fireStart(ComponentAccess w) {
        if (shouldFire) {
            fire(Type.EXECUTING, new ComponentEvent(c, w.getComponent()));
        }
    }
    void fireFinnish(ComponentAccess w) {
        if (shouldFire) {
            fire(Type.FINISHED, new ComponentEvent(c, w.getComponent()));
        }
    }
    void fireException(ComponentException E) {
        if (shouldFire) {
            fire(Type.EXCEPTION, new ExceptionEvent(c, E));
        }
    }
    void fireIn(DataflowEvent e) {
        // CONSISTENCY FIX: guard like the other fire* methods; fire() is a
        // no-op with no listeners, so observable behavior is unchanged.
        if (shouldFire) {
            fire(Type.IN, e);
        }
    }
    void fireOut(DataflowEvent e) {
        if (shouldFire) {
            fire(Type.OUT, e);
        }
    }
    /**
     * Notification Types. Thoes values classify an event object.
     */
    public enum Type {
        /**
         * Execution waiting here for all @In to arrive
         * @see ComponentEvent
         */
        WAITING,
        /** A component is about to be executed.
         * @see ComponentEvent
         */
        EXECUTING,
        /**
         * A component is done with execution
         * @see ComponentEvent
         */
        FINISHED,
        /**
         * A components @In field is receiving a value
         * @see ComponentEvent
         */
        IN,
        /**
         * A components @Out field is providing a value
         * @see ConnectEvent
         */
        OUT,
        /**
         * Exception was thrown by a component
         * @see ConnectEvent
         */
        EXCEPTION
    }
    /**
     * Notification Listener.
     */
    public interface Listener extends EventListener {
        /**
         * Called when an event happens.
         * @param t event type
         * @param E the event
         */
        void notice(Type t, EventObject E);
    }
    /**
     * Connection Event.
     */
    public static class ConnectEvent extends EventObject {
        private static final long serialVersionUID = 1410979580285808419L;
        Access from;
        Access to;
        ConnectEvent(Object src, Access from, Access to) {
            super(src);
            this.from = from;
            this.to = to;
        }
        /**
         * Get the source of the connect event
         * @return the field access object being the connect source
         */
        public Access getFrom() {
            return from;
        }
        /**
         * Get the destination for the connect event.
         *
         * @return the target Field access component.
         */
        public Access getTo() {
            return to;
        }
        @Override
        public String toString() {
            return "Connect: " + from + " -> " + to;
        }
    }
    /**
     * Component Event.
     */
    public static class ComponentEvent extends EventObject {
        private static final long serialVersionUID = -8569599337868335893L;
        Object comp;
        ComponentEvent(Object src, Object comp) {
            super(src);
            this.comp = comp;
        }
        /** Get the component for this event.
         *
         * @return the component
         */
        public Object getComponent() {
            return comp;
        }
        @Override
        public String toString() {
            return "Component: " + getComponent();
        }
    }
    /**
     * Exception Event.
     * An exception occured during component execution.
     *
     */
    public static class ExceptionEvent extends EventObject {
        private static final long serialVersionUID = -1136021018405823527L;
        ComponentException E;
        ExceptionEvent(Object src, ComponentException E) {
            super(src);
            this.E = E;
        }
        /**
         * Get the Component exception.
         * @return the exception
         */
        public ComponentException getException() {
            return E;
        }
        @Override
        public String toString() {
            if (E == null) {
                return "Exception: NULL";
            }
            // IDIOM: unsynchronized StringBuilder instead of StringBuffer;
            // this is single-threaded local string assembly.
            StringBuilder b = new StringBuilder("\n");
            if (E.getCause() != null) {
                for (StackTraceElement ste : E.getCause().getStackTrace()) {
                    b.append(" ").append(ste).append("\n");
                }
            }
            return "Exception: " + E.getMessage() + " in '" + E.getSource() + "'" + b.toString();
        }
    }
    /**
     * Data flow event.
     *
     */
    public static class DataflowEvent extends EventObject {
        private static final long serialVersionUID = -5551146005283344251L;
        Access data;
        Object value;
        DataflowEvent(Object source, Access data, Object value) {
            super(source);
            this.data = data;
            this.value = value;
        }
        /**
         * Get field access info.
         * Note: if you need to alter the value, do not use the Access object
         * method setFieldValue(). call setValue() on this object instead.
         * @return The field access object
         */
        public Access getAccess() {
            return data;
        }
        /**
         * Get the data value that is passed on @In/@Out
         * @return the data value that is passed around.
         */
        public Object getValue() {
            return value;
        }
        /** This methods allows altering the value being passed from @Out to @In.
         * Call this from within the 'notice' event notification when
         * you receive this DataflowEvent. An example would be a unit conversion.
         *
         * @param value the altered value.
         */
        public void setValue(Object value) {
            this.value = value;
        }
        @Override
        public String toString() {
            return "Flow: " + data.getComponent() + "@" + data.getField().getName() + " [" + value + "]";
        }
    }
}
| gpl-2.0 |
AntumDeluge/arianne-stendhal | src/games/stendhal/server/actions/equip/MoveableObject.java | 3044 | /***************************************************************************
* (C) Copyright 2003-2016 - Stendhal *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package games.stendhal.server.actions.equip;
import java.util.List;
import games.stendhal.server.entity.Entity;
import games.stendhal.server.entity.item.Corpse;
import games.stendhal.server.entity.player.Player;
/**
* source or destination object.
*
* @author hendrik
*/
public abstract class MoveableObject {
    /** optional, parent item .*/
    protected Entity parent;
    /** the slot this item is in or should be placed into. */
    protected String slot;
    /** Player to notify about problems. */
    protected final Player player;
    /**
     * Creates a new MoveableObject.
     *
     * @param player
     *            Player to notify about problems
     */
    public MoveableObject(final Player player) {
        this.player = player;
    }
    /**
     * is this object valid?
     *
     * @return true, if the action may be performed, false otherwise
     */
    public abstract boolean isValid();
    /**
     * is the owner of the slot in reach?
     *
     * @param entity
     *            entity to compare to
     * @param distance
     *            max distance
     * @return true, if it is reachable, false otherwise
     */
    public abstract boolean checkDistance(Entity entity, double distance);
    /**
     * gets the name of the slot or null if there is none.
     *
     * @return slot name
     */
    String getSlot() {
        return slot;
    }
    /**
     * returns log information.
     *
     * @return String[2]
     */
    public abstract String[] getLogInfo();
    /**
     * Checks if RPobject is one the valid classes.
     *
     * @param validClasses classes the container is allowed to be
     * @return true if the rpobject is one of the classes in <i>validClasses</i>,
     *         or if there is no parent container at all.
     */
    public boolean checkClass(final List<Class< ? >> validClasses) {
        if (parent != null) {
            return EquipUtil.isCorrectClass(validClasses, parent);
        }
        return true;
    }
    /**
     * Checks if container is a corpse.
     *
     * @return true if container is a corpse.
     */
    public boolean isContainerCorpse() {
        // IDIOM: instanceof replaces the reflective Class.isInstance dance;
        // it is null-safe, so the explicit parent != null check is redundant.
        return parent instanceof Corpse;
    }
    /**
     * Checks whether this moveable fails any of the move preconditions:
     * validity, reachability within maxDistance, or container class.
     *
     * @param player acting player
     * @param maxDistance maximum allowed distance to the slot owner
     * @param containerClassesList allowed container classes
     * @return true if the move must be rejected
     */
    boolean isInvalidMoveable(final Player player, final double maxDistance, final List<Class< ? >> containerClassesList) {
        return !isValid() || !checkDistance(player, maxDistance) || (!checkClass(containerClassesList));
    }
}
| gpl-2.0 |
AntumDeluge/arianne-stendhal | tests/utilities/QuestHelper.java | 1786 | /* $Id$ */
/***************************************************************************
* (C) Copyright 2003-2010 - Stendhal *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package utilities;
import org.junit.BeforeClass;
import games.stendhal.server.core.engine.SingletonRepository;
import games.stendhal.server.maps.MockStendhalRPRuleProcessor;
import games.stendhal.server.maps.MockStendlRPWorld;
import marauroa.common.Log4J;
import marauroa.server.game.db.DatabaseFactory;
import utilities.RPClass.ItemTestHelper;
/**
* Helper methods for testing quests.
*
* @author hendrik
*/
public abstract class QuestHelper extends PlayerTestHelper {
    /**
     * One-time test fixture: boots logging, the test database, the mock world
     * and rule processor, and registers the RP classes quests rely on.
     * The call order below matters: RP classes must exist before the rule
     * processor and entity manager are touched.
     *
     * @throws Exception if any part of the bootstrap fails
     */
    @BeforeClass
    public static void setUpBeforeClass() throws Exception {
        Log4J.init();
        new DatabaseFactory().initializeDatabase();
        MockStendlRPWorld.get();
        generatePlayerRPClasses();
        ItemTestHelper.generateRPClasses();
        generateNPCRPClasses();
        MockStendhalRPRuleProcessor.get();
        // load item configurations to handle money and other items
        SingletonRepository.getEntityManager();
        // start each test class with an empty NPC registry
        SingletonRepository.getNPCList().clear();
    }
}
| gpl-2.0 |
giannoug/android_device_jxd_s7300b | packages/PicturePlayer/src/org/geometerplus/android/fbreader/TextSearchActivity.java | 1725 | /*
* Copyright (C) 2009-2010 Geometer Plus <contact@geometerplus.com>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
package org.geometerplus.android.fbreader;
import android.app.Activity;
import org.geometerplus.fbreader.fbreader.FBReaderApp;
import org.geometerplus.zlibrary.core.application.ZLApplication;
public class TextSearchActivity extends SearchActivity {
//Override
void onSuccess() {
FBReader.Instance.showTextSearchControls(true);
}
/*//Override
void onFailure() {
FBReader.Instance.showTextSearchControls(false);
}*/
//Override
String getFailureMessageResourceKey() {
return "textNotFound";
}
//Override
String getWaitMessageResourceKey() {
return "search";
}
//Override
boolean runSearch(final String pattern) {
final FBReaderApp fbReader = (FBReaderApp)ZLApplication.Instance();
fbReader.TextSearchPatternOption.setValue(pattern);
return fbReader.getTextView().search(pattern, true, false, false, false) != 0;
}
//Override
Activity getParentActivity() {
return FBReader.Instance;
}
}
| gpl-2.0 |
vinhqdang/replication-benchmarker | src/main/java/jbenchmarker/core/SequenceOperation.java | 7346 | /**
* Replication Benchmarker
* https://github.com/score-team/replication-benchmarker/ Copyright (C) 2013
* LORIA / Inria / SCORE Team
*
* This program is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 3 of the License, or (at your option) any later
* version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along with
* this program. If not, see <http://www.gnu.org/licenses/>.
*/
package jbenchmarker.core;
import crdt.CRDT;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/**
 * Local operation of a replicated sequence document. T is the element type
 * (a Character or a String).
 *
 * @author urso
 */
public class SequenceOperation<T> implements LocalOperation, Serializable {

    /** The kinds of operation a sequence document supports. */
    public enum OpType {

        insert, delete, replace, update, move, unsupported, noop, revert, undo
    };

    @Override
    public SequenceOperation clone() {
        // Cloning is not implemented yet; the commented-out code sketches the intent.
        throw new UnsupportedOperationException("Not implemented yet");
        /* return new SequenceOperation(type, this.getReplica(), position, numberOf,
         new ArrayList(content), new VectorClock(this.getVectorClock()));*/
    }

    /**
     * Clamps this operation so it stays within the bounds of the given
     * replica's document view (positions and lengths may exceed the view
     * after concurrent edits).
     *
     * @param replica the replica whose document size bounds this operation
     * @return this operation, adjusted in place, or a fresh noop when a
     *         delete targets an empty document
     */
    @Override
    public LocalOperation adaptTo(CRDT replica) {
        int sizeDoc = ((MergeAlgorithm) replica).getDoc().viewLength();
        if (getType() == OpType.insert) {
            if (position > sizeDoc) {
                position = sizeDoc; // an insert position exceeds document size
            }
        } else if (this.type == OpType.delete && sizeDoc == 0) {
            return new SequenceOperation(OpType.noop, 0, 0, null); // nothing left to delete
        } else if (this.position >= sizeDoc) {
            position = sizeDoc - 1; // a position exceeds document size
        }
        if ((getType() == OpType.delete || getType() == OpType.update) && position + argument > sizeDoc) {
            argument = sizeDoc - position; // delete/update length exceeds document size
        }
        if ((getType() == OpType.update || getType() == OpType.move) && position + content.size() > sizeDoc) {
            content = content.subList(0, sizeDoc - position); // content at position exceeds document size
        }
        return this;
    }

    private OpType type; // type of operation : insert or delete
    private int position; // position in the document
    private int argument; // length of a del or destination of a move
    private List<T> content; // content of an ins / update / move

    /** @return the content of an insert / update / move, or null. */
    public List<T> getContent() {
        return content;
    }

    /** @return the content concatenated into a single string. */
    public String getContentAsString() {
        StringBuilder s = new StringBuilder();
        for (T t : content) {
            s.append(t.toString());
        }
        return s.toString();
    }

    /** @return the length of a delete. (Name typo kept for caller compatibility.) */
    public int getLenghOfADel() {
        return argument;
    }

    public int getPosition() {
        return position;
    }

    public void setPosition(int p) {
        position = p;
    }

    /** @return the destination of a move (shares storage with the delete length). */
    public int getDestination() {
        return argument;
    }

    public OpType getType() {
        return type;
    }

    public SequenceOperation(OpType type, int position, int argument, List<T> content) {
        this.type = type;
        this.position = position;
        this.argument = argument;
        this.content = content;
    }

    /** Converts a string into the list-of-characters form used by operations. */
    private static List<Character> toCharacterList(String content) {
        List<Character> l = new ArrayList<Character>(content.length());
        for (int i = 0; i < content.length(); ++i) {
            l.add(content.charAt(i));
        }
        return l;
    }

    /*
     * Construction of an insert operation (character)
     */
    static public SequenceOperation<Character> insert(int position, String content) {
        return new SequenceOperation<Character>(OpType.insert, position, 0, toCharacterList(content));
    }

    /*
     * Construction of an delete operation
     */
    static public SequenceOperation delete(int position, int offset) {
        return new SequenceOperation(OpType.delete, position, offset, null);
    }

    /*
     * Construction of a replace operation
     */
    static public SequenceOperation<Character> replace(int position, int offset, String content) {
        return new SequenceOperation<Character>(OpType.replace, position, offset, toCharacterList(content));
    }

    /*
     * Construction of an update operation
     */
    static public SequenceOperation<Character> update(int position, String content) {
        List<Character> l = toCharacterList(content);
        return new SequenceOperation<Character>(OpType.update, position, l.size(), l);
    }

    /*
     * Construction of a move operation (potentially new content)
     */
    static public SequenceOperation<Character> move(int position, int destination, String content) {
        return new SequenceOperation<Character>(OpType.move, position, destination, toCharacterList(content));
    }

    static public <T> SequenceOperation<T> replace(int position, int offset, List<T> content) {
        return new SequenceOperation<T>(OpType.replace, position, offset, content);
    }

    /**
     * Construction of a noop operation (usefull for pure merge)
     */
    public static SequenceOperation noop() {
        return new SequenceOperation(OpType.noop, -1, -1, null);
    }

    /*
     * Construction of a stylage operation
     */
    static public SequenceOperation<Character> unsupported() {
        return new SequenceOperation(OpType.unsupported, -1, -1, null);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (!(obj instanceof SequenceOperation)) {
            return false;
        }
        final SequenceOperation other = (SequenceOperation) obj;
        if (this.type != other.type) {
            return false;
        }
        if (this.position != other.position) {
            return false;
        }
        if (this.argument != other.argument) {
            return false;
        }
        if ((this.content == null) ? (other.content != null) : !this.content.equals(other.content)) {
            return false;
        }
        // BUG FIX: this previously ended with super.equals(obj) (Object identity),
        // which made two distinct but field-equal operations compare unequal,
        // defeating the field comparisons above.
        return true;
    }

    @Override
    public int hashCode() {
        int hash = 7;
        hash = 89 * hash + (this.type != null ? this.type.hashCode() : 0);
        hash = 89 * hash + this.position;
        hash = 89 * hash + this.argument;
        hash = 89 * hash + (this.content != null ? this.content.hashCode() : 0);
        // BUG FIX: the identity-based super.hashCode() term was removed so that
        // equal operations (see equals) also have equal hash codes.
        return hash;
    }

    @Override
    public String toString() {
        return "SequenceOperation{" + "type=" + type + ", position=" + position + ", arg=" + argument + ", content=" + content + '}';
    }
}
| gpl-3.0 |
kingtang/spring-learn | spring-test-mvc/src/main/java/org/springframework/test/web/servlet/request/RequestPostProcessor.java | 1670 | /*
* Copyright 2002-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.test.web.servlet.request;
import org.springframework.mock.web.MockHttpServletRequest;
/**
 * Extension point for applications or 3rd party libraries that wish to further
 * initialize a {@link MockHttpServletRequest} instance after it has been built
 * by {@link MockHttpServletRequestBuilder} or its subclass
 * {@link MockMultipartHttpServletRequestBuilder}.
 *
 * <p>Implementations of this interface can be provided to
 * {@link MockHttpServletRequestBuilder#with(RequestPostProcessor)} at the time
 * when a request is about to be constructed.
 *
 * @author Rossen Stoyanchev
 * @author Rob Winch
 * @since 3.2
 */
public interface RequestPostProcessor {
	/**
	 * Post-process the given {@code MockHttpServletRequest} after its creation
	 * and initialization through a {@code MockHttpServletRequestBuilder}.
	 * <p>Invoked once per request, after the builder has finished populating it.
	 * @param request the request to initialize
	 * @return the request to use, either the one passed in or a wrapped one
	 */
	MockHttpServletRequest postProcessRequest(MockHttpServletRequest request);
}
| gpl-3.0 |
kevinwang/minecarft | lwjgl-source-2.8.2/src/java/org/lwjgl/util/ReadableDimension.java | 2132 | /*
* Copyright (c) 2002-2008 LWJGL Project
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'LWJGL' nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.lwjgl.util;
/**
 * Readonly interface for Dimensions (a width/height pair).
 * @author $Author: spasi $
 * @version $Revision: 3418 $
 * $Id: ReadableDimension.java 3418 2010-09-28 21:11:35Z spasi $
 */
public interface ReadableDimension {
	/**
	 * Get the width
	 * @return the width component of this dimension
	 */
	int getWidth();
	/**
	 * Get the height
	 * @return the height component of this dimension
	 */
	int getHeight();
	/**
	 * Copy this ReadableDimension into a destination Dimension
	 * @param dest The destination to receive this dimension's width and height
	 */
	void getSize(WritableDimension dest);
}
| gpl-3.0 |
Hunsu/ntorrent | plugins/ntorrent/source/ntorrent/connection/model/ProxyProfile.java | 416 | package ntorrent.connection.model;
import java.io.Serializable;
import java.net.Proxy.Type;
/**
 * A serializable, cloneable description of a proxy connection profile:
 * proxy type, endpoint, and optional authentication credentials.
 */
public interface ProxyProfile extends Serializable,Cloneable {
	/** @return the proxy type (one of {@link java.net.Proxy.Type}: DIRECT, HTTP or SOCKS). */
	public Type getProxyType();
	/** @return the proxy host name or address. */
	public String getHost();
	/** @return the proxy port. */
	public Integer getPort();
	/** @return true if this profile carries authentication credentials. */
	public boolean usingAuthentication();
	/** @return the user name for proxy authentication. */
	public String getUsername();
	/** @return the password for proxy authentication. */
	public String getPassword();
	/** @return a cloned copy of this profile. */
	public ProxyProfile getClonedInstance() throws CloneNotSupportedException;
}
| gpl-3.0 |
madhumita-dfki/Excitement-Open-Platform | alignmentedas/src/test/java/eu/excitementproject/eop/alignmentedas/scorers/SimpleWordCoverageCounterTest.java | 3511 | package eu.excitementproject.eop.alignmentedas.scorers;
import static org.junit.Assert.*;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Vector;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.log4j.BasicConfigurator;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.uima.jcas.JCas;
import org.junit.Assume;
import org.junit.Test;
import org.uimafit.util.JCasUtil;
import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Token;
import eu.excitement.type.alignment.Link;
import eu.excitement.type.alignment.LinkUtils;
import eu.excitementproject.eop.alignmentedas.p1eda.scorers.SimpleWordCoverageCounter;
import eu.excitementproject.eop.common.component.alignment.AlignmentComponent;
import eu.excitementproject.eop.common.component.scoring.ScoringComponent;
import eu.excitementproject.eop.core.component.alignment.phraselink.IdenticalLemmaPhraseLinker;
import eu.excitementproject.eop.lap.dkpro.TreeTaggerEN;
/**
 * Tests SimpleWordCoverageCounter: builds a Text/Hypothesis CAS pair with
 * TreeTagger, aligns it with the identical-lemma linker, then checks link
 * filtering and the coverage score vector. Skipped when TreeTagger is not
 * installed locally.
 */
public class SimpleWordCoverageCounterTest {
	@Test
	public void test() {
		// Set Log4J for the test
		BasicConfigurator.resetConfiguration();
		BasicConfigurator.configure();
		Logger.getRootLogger().setLevel(Level.DEBUG); // for UIMA (hiding < INFO)
		Logger testlogger = Logger.getLogger(getClass().getName());
		// prepare a lemmatizer
		TreeTaggerEN lemmatizer = null;
		JCas aJCas = null;
		try
		{
			lemmatizer = new TreeTaggerEN();
			// Build a single Text/Hypothesis pair CAS to align and score.
			aJCas = lemmatizer.generateSingleTHPairCAS("Lovely TreeTagger test is in sight, or lovely goes not?", "Lovely goes a test.");
		}
		catch (Exception e)
		{
			// check if this is due to missing TreeTagger binary and model.
			// In such a case, we just skip this test.
			// (see /lap/src/scripts/treetagger/README.txt to how to install TreeTagger)
			if (ExceptionUtils.getRootCause(e) instanceof java.io.IOException)
			{
				testlogger.info("Skipping the test: TreeTagger binary and/or models missing. \n To run this testcase, TreeTagger installation is needed. (see /lap/src/scripts/treetagger/README.txt)");
				Assume.assumeTrue(false); // we won't test this test case any longer.
			}
			fail(e.getMessage());
		}
		// annotate with identity
		try {
			AlignmentComponent idtLinker = new IdenticalLemmaPhraseLinker();
			idtLinker.annotate(aJCas);
			LinkUtils.dumpTokenLevelLinks(aJCas, System.out);
		}
		catch (Exception e)
		{
			fail(e.getMessage());
		}
		// get first token of H, and test the method
		// filterLinksWithTargetsIncluding
		try {
			JCas hView = aJCas.getView("HypothesisView");
			Collection<Token> tokens = JCasUtil.select(hView, Token.class);
			List<Link> links = LinkUtils.selectLinksWith(aJCas, (String) null);
			Iterator<Token> ti = tokens.iterator();
			ti.next(); // first token
			Token t = ti.next(); // second token
			// Exactly one T-to-H identity link is expected to target the second H token.
			List<Link> filteredLinks = LinkUtils.filterLinksWithTargetsIncluding(links, t, Link.Direction.TtoH);
			//System.out.println(filteredLinks.size());
			assertEquals(1, filteredLinks.size());
		}
		catch (Exception e)
		{
			fail(e.getMessage());
		}
		// Okay, Let's do some coverage test.
		ScoringComponent count1 = new SimpleWordCoverageCounter(null); // count all
		try {
			// The scorer returns a vector of coverage figures; log each entry.
			Vector<Double> v = count1.calculateScores(aJCas);
			testlogger.info(v.get(0));
			testlogger.info(v.get(1));
			testlogger.info(v.get(2));
			testlogger.info(v.get(3));
		}
		catch (Exception e)
		{
			fail(e.getMessage());
		}
	}
}
| gpl-3.0 |
SKCraft/Applied-Energistics-2 | src/main/java/appeng/container/slot/SlotOutput.java | 1170 | /*
* This file is part of Applied Energistics 2.
* Copyright (c) 2013 - 2014, AlgorithmX2, All rights reserved.
*
* Applied Energistics 2 is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Applied Energistics 2 is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Applied Energistics 2. If not, see <http://www.gnu.org/licenses/lgpl>.
*/
package appeng.container.slot;
import net.minecraft.inventory.IInventory;
import net.minecraft.item.ItemStack;
/**
 * A slot that only ever emits items: nothing may be inserted into it.
 */
public class SlotOutput extends AppEngSlot
{

	// NOTE(review): parameter meanings inferred from the super( a, b, c, d ) forwarding
	// (inventory, slot index, x, y per the AppEngSlot constructor) — confirm against AppEngSlot.
	public SlotOutput( IInventory inventory, int slotIndex, int xPos, int yPos, int icon )
	{
		super( inventory, slotIndex, xPos, yPos );
		this.IIcon = icon;
	}

	@Override
	public boolean isItemValid( ItemStack stack )
	{
		// Output slots never accept item insertions.
		return false;
	}
}
| gpl-3.0 |
kanekikun420/opsu | src/itdelatrisu/opsu/states/Splash.java | 5948 | /*
* opsu! - an open-source osu! client
* Copyright (C) 2014, 2015 Jeffrey Han
*
* opsu! is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* opsu! is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with opsu!. If not, see <http://www.gnu.org/licenses/>.
*/
package itdelatrisu.opsu.states;
import itdelatrisu.opsu.GameImage;
import itdelatrisu.opsu.Opsu;
import itdelatrisu.opsu.Options;
import itdelatrisu.opsu.OszUnpacker;
import itdelatrisu.opsu.Utils;
import itdelatrisu.opsu.audio.MusicController;
import itdelatrisu.opsu.audio.SoundController;
import itdelatrisu.opsu.beatmap.BeatmapParser;
import itdelatrisu.opsu.beatmap.BeatmapSetList;
import itdelatrisu.opsu.beatmap.BeatmapWatchService;
import itdelatrisu.opsu.replay.ReplayImporter;
import itdelatrisu.opsu.ui.UI;
import itdelatrisu.opsu.ui.animations.AnimatedValue;
import itdelatrisu.opsu.ui.animations.AnimationEquation;
import java.io.File;
import org.newdawn.slick.Color;
import org.newdawn.slick.GameContainer;
import org.newdawn.slick.Graphics;
import org.newdawn.slick.Input;
import org.newdawn.slick.SlickException;
import org.newdawn.slick.state.BasicGameState;
import org.newdawn.slick.state.StateBasedGame;
/**
 * "Splash Screen" state.
 * <p>
 * Loads game resources and enters "Main Menu" state.
 */
public class Splash extends BasicGameState {
	/** Minimum time, in milliseconds, to display the splash screen (and fade in the logo). */
	private static final int MIN_SPLASH_TIME = 400;

	/**
	 * Whether or not loading has completed.
	 * FIX: declared volatile — it is written by the loader thread and read by
	 * the game-loop thread, so visibility must be guaranteed.
	 */
	private volatile boolean finished = false;

	/**
	 * Loading thread.
	 * FIX: declared volatile — set/cleared by the loader thread, read by the
	 * game loop and the input handler (keyPressed).
	 */
	private volatile Thread thread;

	/** Number of times the 'Esc' key has been pressed. */
	private int escapeCount = 0;

	/** Whether the skin being loaded is a new skin (for program restarts). */
	private boolean newSkin = false;

	/** Whether the watch service is newly enabled (for program restarts). */
	private boolean watchServiceChange = false;

	/** Logo alpha level. */
	private AnimatedValue logoAlpha;

	// game-related variables
	private final int state;
	private GameContainer container;
	private boolean init = false;

	public Splash(int state) {
		this.state = state;
	}

	@Override
	public void init(GameContainer container, StateBasedGame game)
			throws SlickException {
		this.container = container;

		// check if skin changed
		if (Options.getSkin() != null)
			this.newSkin = (Options.getSkin().getDirectory() != Options.getSkinDir());

		// check if watch service newly enabled
		this.watchServiceChange = Options.isWatchServiceEnabled() && BeatmapWatchService.get() == null;

		// load Utils class first (needed in other 'init' methods)
		Utils.init(container, game);

		// fade in logo
		this.logoAlpha = new AnimatedValue(MIN_SPLASH_TIME, 0f, 1f, AnimationEquation.LINEAR);
		GameImage.MENU_LOGO.getImage().setAlpha(0f);
	}

	@Override
	public void render(GameContainer container, StateBasedGame game, Graphics g)
			throws SlickException {
		g.setBackground(Color.black);
		GameImage.MENU_LOGO.getImage().drawCentered(container.getWidth() / 2, container.getHeight() / 2);
		UI.drawLoadingProgress(g);
	}

	@Override
	public void update(GameContainer container, StateBasedGame game, int delta)
			throws SlickException {
		// Lazily kick off the loading work the first time update() runs.
		if (!init) {
			init = true;

			// resources already loaded (from application restart)
			if (BeatmapSetList.get() != null) {
				if (newSkin || watchServiceChange) { // need to reload resources
					thread = new Thread() {
						@Override
						public void run() {
							// reload beatmaps if watch service newly enabled
							if (watchServiceChange)
								BeatmapParser.parseAllFiles(Options.getBeatmapDir());

							// reload sounds if skin changed
							// TODO: only reload each sound if actually needed?
							if (newSkin)
								SoundController.init();

							finished = true;
							thread = null;
						}
					};
					thread.start();
				} else // don't reload anything
					finished = true;
			}

			// load all resources in a new thread
			else {
				thread = new Thread() {
					@Override
					public void run() {
						File beatmapDir = Options.getBeatmapDir();

						// unpack all OSZ archives
						OszUnpacker.unpackAllFiles(Options.getOSZDir(), beatmapDir);

						// parse song directory
						BeatmapParser.parseAllFiles(beatmapDir);

						// import replays
						ReplayImporter.importAllReplaysFromDir(Options.getReplayImportDir());

						// load sounds
						SoundController.init();

						finished = true;
						thread = null;
					}
				};
				thread.start();
			}
		}

		// fade in logo
		if (logoAlpha.update(delta))
			GameImage.MENU_LOGO.getImage().setAlpha(logoAlpha.getValue());

		// change states when loading complete
		if (finished && logoAlpha.getValue() >= 1f) {
			// initialize song list
			if (BeatmapSetList.get().size() > 0) {
				BeatmapSetList.get().init();
				if (Options.isThemeSongEnabled())
					MusicController.playThemeSong();
				else
					((SongMenu) game.getState(Opsu.STATE_SONGMENU)).setFocus(BeatmapSetList.get().getRandomNode(), -1, true, true);
			}

			// play the theme song
			else
				MusicController.playThemeSong();

			game.enterState(Opsu.STATE_MAINMENU);
		}
	}

	@Override
	public int getID() { return state; }

	@Override
	public void keyPressed(int key, char c) {
		if (key == Input.KEY_ESCAPE) {
			// close program
			if (++escapeCount >= 3)
				container.exit();

			// stop parsing beatmaps by sending interrupt to BeatmapParser
			else if (thread != null)
				thread.interrupt();
		}
	}
}
| gpl-3.0 |
eethomas/eucalyptus | clc/modules/simpleworkflow/src/main/java/com/eucalyptus/simpleworkflow/persist/PersistenceWorkflowExecutions.java | 3941 | /*************************************************************************
* Copyright 2009-2015 Eucalyptus Systems, Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see http://www.gnu.org/licenses/.
*
* Please contact Eucalyptus Systems, Inc., 6755 Hollister Ave., Goleta
* CA 93117, USA or visit http://www.eucalyptus.com/licenses/ if you need
* additional information or have any questions.
************************************************************************/
package com.eucalyptus.simpleworkflow.persist;
import static com.eucalyptus.simpleworkflow.SimpleWorkflowProperties.getWorkflowExecutionDurationMillis;
import static com.eucalyptus.simpleworkflow.SimpleWorkflowProperties.getWorkflowExecutionRetentionDurationMillis;
import static com.eucalyptus.simpleworkflow.common.SimpleWorkflowMetadata.WorkflowExecutionMetadata;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import org.hibernate.criterion.Restrictions;
import com.eucalyptus.component.annotation.ComponentNamed;
import com.eucalyptus.simpleworkflow.SwfMetadataException;
import com.eucalyptus.simpleworkflow.WorkflowExecution;
import com.eucalyptus.simpleworkflow.WorkflowExecutions;
import com.eucalyptus.auth.principal.OwnerFullName;
import com.google.common.base.Function;
import com.google.common.base.Predicates;
/**
 * Persistence-backed implementation of {@link WorkflowExecutions}: queries and
 * counts {@link WorkflowExecution} records via Hibernate criteria examples.
 */
@ComponentNamed
public class PersistenceWorkflowExecutions extends SwfPersistenceSupport<WorkflowExecutionMetadata,WorkflowExecution> implements WorkflowExecutions {

  public PersistenceWorkflowExecutions( ) {
    super( "workflow-execution" );
  }

  /**
   * Lists open workflow executions whose explicit timeout has passed, or whose
   * creation time exceeds the configured maximum execution duration.
   *
   * @param time the reference time in epoch milliseconds
   * @param transform converts each matching execution to the result type
   */
  public <T> List<T> listTimedOut( final long time,
                                   final Function<? super WorkflowExecution,T> transform ) throws SwfMetadataException {
    return listByExample(
        WorkflowExecution.exampleForOpenWorkflow(),
        Predicates.alwaysTrue( ),
        Restrictions.disjunction( )
            .add( Restrictions.lt( "timeoutTimestamp", new Date( time ) ) )
            .add( Restrictions.lt( "creationTimestamp", new Date( time - getWorkflowExecutionDurationMillis() ) ) ),
        Collections.<String,String>emptyMap( ),
        transform );
  }

  /**
   * Lists closed workflow executions whose retention period has expired, or
   * whose close time exceeds the configured retention duration.
   *
   * @param time the reference time in epoch milliseconds
   * @param transform converts each matching execution to the result type
   */
  public <T> List<T> listRetentionExpired( final long time,
                                           final Function<? super WorkflowExecution,T> transform ) throws SwfMetadataException {
    return listByExample(
        WorkflowExecution.exampleForClosedWorkflow(),
        Predicates.alwaysTrue(),
        Restrictions.disjunction()
            .add( Restrictions.lt( "retentionTimestamp", new Date( time ) ) )
            .add( Restrictions.lt( "closeTimestamp", new Date( time - getWorkflowExecutionRetentionDurationMillis() ) ) ),
        Collections.<String, String>emptyMap(),
        transform );
  }

  /** Counts open workflow executions for the given owner and domain. */
  @Override
  public long countOpenByDomain( final OwnerFullName ownerFullName,
                                 final String domain ) throws SwfMetadataException {
    return countByExample( WorkflowExecution.exampleForOpenWorkflow( ownerFullName, domain, null ) );
  }

  // Example-entity factories used by the generic persistence support lookups.
  @Override
  protected WorkflowExecution exampleWithOwner( final OwnerFullName ownerFullName ) {
    return WorkflowExecution.exampleWithOwner( ownerFullName );
  }

  @Override
  protected WorkflowExecution exampleWithName( final OwnerFullName ownerFullName, final String name ) {
    return WorkflowExecution.exampleWithName( ownerFullName, name );
  }
}
| gpl-3.0 |
LasseBP/overture | core/typechecker/src/main/java/org/overture/typechecker/utilities/VariableNameCollector.java | 7679 | /*
* #%~
* The VDM Type Checker
* %%
* Copyright (C) 2008 - 2014 Overture
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-3.0.html>.
* #~%
*/
package org.overture.typechecker.utilities;
import org.overture.ast.analysis.AnalysisException;
import org.overture.ast.analysis.AnswerAdaptor;
import org.overture.ast.definitions.AAssignmentDefinition;
import org.overture.ast.definitions.AClassInvariantDefinition;
import org.overture.ast.definitions.AEqualsDefinition;
import org.overture.ast.definitions.AExplicitFunctionDefinition;
import org.overture.ast.definitions.AExplicitOperationDefinition;
import org.overture.ast.definitions.AExternalDefinition;
import org.overture.ast.definitions.AImplicitFunctionDefinition;
import org.overture.ast.definitions.AImplicitOperationDefinition;
import org.overture.ast.definitions.AImportedDefinition;
import org.overture.ast.definitions.AInheritedDefinition;
import org.overture.ast.definitions.AInstanceVariableDefinition;
import org.overture.ast.definitions.ALocalDefinition;
import org.overture.ast.definitions.AMultiBindListDefinition;
import org.overture.ast.definitions.AMutexSyncDefinition;
import org.overture.ast.definitions.ANamedTraceDefinition;
import org.overture.ast.definitions.APerSyncDefinition;
import org.overture.ast.definitions.ARenamedDefinition;
import org.overture.ast.definitions.AStateDefinition;
import org.overture.ast.definitions.AThreadDefinition;
import org.overture.ast.definitions.ATypeDefinition;
import org.overture.ast.definitions.AUntypedDefinition;
import org.overture.ast.definitions.AValueDefinition;
import org.overture.ast.definitions.SClassDefinition;
import org.overture.ast.intf.lex.ILexNameToken;
import org.overture.ast.lex.LexNameList;
import org.overture.ast.node.INode;
import org.overture.typechecker.assistant.ITypeCheckerAssistantFactory;
//TODO Add assistant Javadoc
/**
* A refactored assistant / functionality visitor. This class implements a way to collect variable names from any node
* in the AST
*
* @author kel
*/
public class VariableNameCollector extends AnswerAdaptor<LexNameList>
{
	// Factory giving access to the assistants used to recurse into sub-definitions.
	protected ITypeCheckerAssistantFactory af;

	/**
	 * Creates a collector that resolves nested definitions through the given
	 * assistant factory.
	 *
	 * @param af the type checker assistant factory
	 */
	public VariableNameCollector(ITypeCheckerAssistantFactory af)
	{
		this.af = af;
	}
@Override
public LexNameList caseAAssignmentDefinition(AAssignmentDefinition node)
throws AnalysisException
{
return new LexNameList(node.getName());
}
@Override
public LexNameList defaultSClassDefinition(SClassDefinition node)
throws AnalysisException
{
return af.createPDefinitionListAssistant().getVariableNames(node.getDefinitions());
}
@Override
public LexNameList caseAClassInvariantDefinition(
AClassInvariantDefinition node) throws AnalysisException
{
return new LexNameList(node.getName());
}
@Override
public LexNameList caseAEqualsDefinition(AEqualsDefinition node)
throws AnalysisException
{
return node.getDefs() == null ? new LexNameList()
: af.createPDefinitionListAssistant().getVariableNames(node.getDefs());
}
@Override
public LexNameList caseAExplicitFunctionDefinition(
AExplicitFunctionDefinition node) throws AnalysisException
{
return new LexNameList(node.getName());
}
@Override
public LexNameList caseAExplicitOperationDefinition(
AExplicitOperationDefinition node) throws AnalysisException
{
return new LexNameList(node.getName());
}
	@Override
	public LexNameList caseAExternalDefinition(AExternalDefinition node)
		throws AnalysisException
	{
		// Delegate to the wrapped state definition of the external clause.
		return node.getState().apply(THIS); // TODO: Is this applicable?
	}
@Override
public LexNameList caseAImplicitFunctionDefinition(
AImplicitFunctionDefinition node) throws AnalysisException
{
return new LexNameList(node.getName());
}
@Override
public LexNameList caseAImplicitOperationDefinition(
AImplicitOperationDefinition node) throws AnalysisException
{
return new LexNameList(node.getName());
}
@Override
public LexNameList caseAImportedDefinition(AImportedDefinition node)
throws AnalysisException
{
return node.getDef().apply(THIS);
}
	@Override
	public LexNameList caseAInheritedDefinition(AInheritedDefinition node)
		throws AnalysisException
	{
		// return AInheritedDefinitionAssistantTC.getVariableNames((AInheritedDefinition) node);
		LexNameList names = new LexNameList();
		// TODO:What About Here, how to I need to handle it. like I have it or Bring the method to this class?
		DefinitionTypeFinder.checkSuperDefinition(node);
		// Collect the super definition's names, re-qualified with this
		// definition's module so they are visible under the inheriting name.
		for (ILexNameToken vn : node.getSuperdef().apply(THIS))
		{
			names.add(vn.getModifiedName(node.getName().getModule()));
		}
		return names;
	}
@Override
public LexNameList caseAInstanceVariableDefinition(
AInstanceVariableDefinition node) throws AnalysisException
{
return new LexNameList(node.getName());
}
@Override
public LexNameList caseALocalDefinition(ALocalDefinition node)
throws AnalysisException
{
return new LexNameList(node.getName());
}
@Override
public LexNameList caseAMultiBindListDefinition(
AMultiBindListDefinition node) throws AnalysisException
{
return node.getDefs() == null ? new LexNameList()
: af.createPDefinitionListAssistant().getVariableNames(node.getDefs());
}
@Override
public LexNameList caseAMutexSyncDefinition(AMutexSyncDefinition node)
throws AnalysisException
{
return new LexNameList();
}
@Override
public LexNameList caseANamedTraceDefinition(ANamedTraceDefinition node)
throws AnalysisException
{
return new LexNameList(node.getName());
}
@Override
public LexNameList caseAPerSyncDefinition(APerSyncDefinition node)
throws AnalysisException
{
return new LexNameList();
}
@Override
public LexNameList caseARenamedDefinition(ARenamedDefinition node)
throws AnalysisException
{
LexNameList both = new LexNameList(node.getName());
both.add(node.getDef().getName());
return both;
}
	@Override
	public LexNameList caseAStateDefinition(AStateDefinition node)
		throws AnalysisException
	{
		// A state definition exposes the names of all of its state fields.
		return af.createPDefinitionListAssistant().getVariableNames(node.getStateDefs());
	}
@Override
public LexNameList caseAThreadDefinition(AThreadDefinition node)
throws AnalysisException
{
return node.getOperationDef() == null ? null
: new LexNameList(node.getOperationDef().getName());
}
@Override
public LexNameList caseATypeDefinition(ATypeDefinition node)
throws AnalysisException
{
return new LexNameList(node.getName());
}
@Override
public LexNameList caseAUntypedDefinition(AUntypedDefinition node)
throws AnalysisException
{
return new LexNameList(node.getName());
}
	@Override
	public LexNameList caseAValueDefinition(AValueDefinition node)
		throws AnalysisException
	{
		// The names bound by a value definition are the identifiers that
		// occur in its pattern, as extracted by the pattern assistant.
		return af.createPPatternAssistant().getVariableNames(node.getPattern());
	}
	@Override
	public LexNameList createNewReturnValue(INode node)
	{
		// Default case: every definition kind is expected to be handled by an
		// explicit case method above, so reaching this point indicates a
		// missing case (only detected when assertions are enabled with -ea).
		assert false : "default case should never happen in getVariableNames";
		return null;
	}
	@Override
	public LexNameList createNewReturnValue(Object node)
	{
		// Default case for non-INode arguments; see the INode overload above.
		// Returns null only when assertions are disabled.
		assert false : "default case should never happen in getVariableNames";
		return null;
	}
}
| gpl-3.0 |
CloudyPadmal/android-client | mifosng-android/src/main/java/com/mifos/objects/accounts/loan/Transaction.java | 6177 | /*
* This project is licensed under the open source MPL V2.
* See https://github.com/openMF/android-client/blob/master/LICENSE.md
*/
package com.mifos.objects.accounts.loan;
import android.os.Parcel;
import android.os.Parcelable;
import com.mifos.objects.accounts.savings.Currency;
import java.util.ArrayList;
import java.util.List;
/**
 * A single loan transaction as returned by the Mifos API.
 *
 * <p>Plain data holder: boxed field types are used throughout so that absent
 * values in the API response map to {@code null}. The class is
 * {@link Parcelable} so instances can be passed between Android components.
 */
public class Transaction implements Parcelable {
    private Integer id;
    private Integer officeId;
    private String officeName;
    private Type type;
    // Date components as delivered by the API; presumably
    // [year, month, day] — TODO confirm against the server response format.
    private List<Integer> date = new ArrayList<Integer>();
    private Currency currency;
    private PaymentDetailData paymentDetailData;
    private Double amount;
    private Double principalPortion;
    private Double interestPortion;
    private Double feeChargesPortion;
    private Double penaltyChargesPortion;
    private Double overpaymentPortion;
    public Integer getId() {
        return id;
    }
    public void setId(Integer id) {
        this.id = id;
    }
    public Integer getOfficeId() {
        return officeId;
    }
    public void setOfficeId(Integer officeId) {
        this.officeId = officeId;
    }
    public String getOfficeName() {
        return officeName;
    }
    public void setOfficeName(String officeName) {
        this.officeName = officeName;
    }
    public Type getType() {
        return type;
    }
    public void setType(Type type) {
        this.type = type;
    }
    public List<Integer> getDate() {
        return date;
    }
    public void setDate(List<Integer> date) {
        this.date = date;
    }
    public Currency getCurrency() {
        return currency;
    }
    public void setCurrency(Currency currency) {
        this.currency = currency;
    }
    public PaymentDetailData getPaymentDetailData() {
        return paymentDetailData;
    }
    public void setPaymentDetailData(PaymentDetailData paymentDetailData) {
        this.paymentDetailData = paymentDetailData;
    }
    public Double getAmount() {
        return amount;
    }
    public void setAmount(Double amount) {
        this.amount = amount;
    }
    public Double getPrincipalPortion() {
        return principalPortion;
    }
    public void setPrincipalPortion(Double principalPortion) {
        this.principalPortion = principalPortion;
    }
    public Double getInterestPortion() {
        return interestPortion;
    }
    public void setInterestPortion(Double interestPortion) {
        this.interestPortion = interestPortion;
    }
    public Double getFeeChargesPortion() {
        return feeChargesPortion;
    }
    public void setFeeChargesPortion(Double feeChargesPortion) {
        this.feeChargesPortion = feeChargesPortion;
    }
    public Double getPenaltyChargesPortion() {
        return penaltyChargesPortion;
    }
    public void setPenaltyChargesPortion(Double penaltyChargesPortion) {
        this.penaltyChargesPortion = penaltyChargesPortion;
    }
    public Double getOverpaymentPortion() {
        return overpaymentPortion;
    }
    public void setOverpaymentPortion(Double overpaymentPortion) {
        this.overpaymentPortion = overpaymentPortion;
    }
    @Override
    public String toString() {
        return "Transaction{" +
                "id=" + id +
                ", officeId=" + officeId +
                ", officeName='" + officeName + '\'' +
                ", type=" + type +
                ", date=" + date +
                ", currency=" + currency +
                ", paymentDetailData=" + paymentDetailData +
                ", amount=" + amount +
                ", principalPortion=" + principalPortion +
                ", interestPortion=" + interestPortion +
                ", feeChargesPortion=" + feeChargesPortion +
                ", penaltyChargesPortion=" + penaltyChargesPortion +
                ", overpaymentPortion=" + overpaymentPortion +
                '}';
    }
    @Override
    public int describeContents() {
        // No special content (e.g. file descriptors) inside this parcelable.
        return 0;
    }
    // NOTE: the write order below must stay exactly in sync with the read
    // order in Transaction(Parcel); update both together when fields change.
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeValue(this.id);
        dest.writeValue(this.officeId);
        dest.writeString(this.officeName);
        dest.writeParcelable(this.type, flags);
        dest.writeList(this.date);
        dest.writeParcelable(this.currency, flags);
        dest.writeParcelable(this.paymentDetailData, flags);
        dest.writeValue(this.amount);
        dest.writeValue(this.principalPortion);
        dest.writeValue(this.interestPortion);
        dest.writeValue(this.feeChargesPortion);
        dest.writeValue(this.penaltyChargesPortion);
        dest.writeValue(this.overpaymentPortion);
    }
    public Transaction() {
    }
    // Reads fields in the same order writeToParcel() wrote them.
    protected Transaction(Parcel in) {
        this.id = (Integer) in.readValue(Integer.class.getClassLoader());
        this.officeId = (Integer) in.readValue(Integer.class.getClassLoader());
        this.officeName = in.readString();
        this.type = in.readParcelable(Type.class.getClassLoader());
        this.date = new ArrayList<Integer>();
        in.readList(this.date, Integer.class.getClassLoader());
        this.currency = in.readParcelable(Currency.class.getClassLoader());
        this.paymentDetailData = in.readParcelable(PaymentDetailData.class.getClassLoader());
        this.amount = (Double) in.readValue(Double.class.getClassLoader());
        this.principalPortion = (Double) in.readValue(Double.class.getClassLoader());
        this.interestPortion = (Double) in.readValue(Double.class.getClassLoader());
        this.feeChargesPortion = (Double) in.readValue(Double.class.getClassLoader());
        this.penaltyChargesPortion = (Double) in.readValue(Double.class.getClassLoader());
        this.overpaymentPortion = (Double) in.readValue(Double.class.getClassLoader());
    }
    // Required by the Parcelable contract; used by the framework to
    // re-create instances from a Parcel.
    public static final Parcelable.Creator<Transaction> CREATOR =
            new Parcelable.Creator<Transaction>() {
                @Override
                public Transaction createFromParcel(Parcel source) {
                    return new Transaction(source);
                }
                @Override
                public Transaction[] newArray(int size) {
                    return new Transaction[size];
                }
            };
}
| mpl-2.0 |
lat-lon/geomajas | application/geomajas-gwt-example/src/main/java/org/geomajas/example/gwt/client/samples/toolbar/CustomToolbarToolsSample.java | 3605 | /*
* This is part of Geomajas, a GIS framework, http://www.geomajas.org/.
*
* Copyright 2008-2013 Geosparc nv, http://www.geosparc.com/, Belgium.
*
* The program is available in open source according to the GNU Affero
* General Public License. All contributions in this program are covered
* by the Geomajas Contributors License Agreement. For full licensing
* details, see LICENSE.txt in the project root.
*/
package org.geomajas.example.gwt.client.samples.toolbar;
import org.geomajas.example.gwt.client.samples.base.SamplePanel;
import org.geomajas.example.gwt.client.samples.base.SamplePanelFactory;
import org.geomajas.example.gwt.client.samples.i18n.I18nProvider;
import org.geomajas.geometry.Coordinate;
import org.geomajas.gwt.client.action.ToolbarModalAction;
import org.geomajas.gwt.client.controller.AbstractGraphicsController;
import org.geomajas.gwt.client.controller.GraphicsController;
import org.geomajas.gwt.client.controller.PanController;
import org.geomajas.gwt.client.widget.MapWidget;
import org.geomajas.gwt.client.widget.Toolbar;
import com.google.gwt.event.dom.client.MouseUpEvent;
import com.smartgwt.client.util.SC;
import com.smartgwt.client.widgets.Canvas;
import com.smartgwt.client.widgets.events.ClickEvent;
import com.smartgwt.client.widgets.layout.VLayout;
/**
* <p>
* Sample that shows how a custom tools can be added to a toolbar.
* </p>
*
* @author Frank Wynants
*/
public class CustomToolbarToolsSample extends SamplePanel {

	public static final String TITLE = "CustomToolbarTools";

	// Factory used by the sample framework to instantiate this panel lazily.
	public static final SamplePanelFactory FACTORY = new SamplePanelFactory() {

		public SamplePanel createPanel() {
			return new CustomToolbarToolsSample();
		}
	};

	/**
	 * Builds the sample view: a map with a toolbar containing one custom
	 * modal button that toggles a controller reporting click coordinates.
	 *
	 * @return The viewPanel Canvas
	 */
	public Canvas getViewPanel() {
		VLayout layout = new VLayout();
		layout.setWidth100();
		layout.setHeight100();
		final MapWidget map = new MapWidget("mapOsm", "gwt-samples");
		map.setController(new PanController(map));
		final Toolbar toolbar = new Toolbar(map);
		toolbar.setButtonSize(Toolbar.BUTTON_SIZE_BIG);
		// Create a custom controller that will be enabled/disabled by a button in the toolbar:
		final GraphicsController customController = new AbstractGraphicsController(map) {

			public void onMouseUp(MouseUpEvent event) {
				// Show both coordinate spaces for the clicked point.
				Coordinate screenPosition = getScreenPosition(event);
				Coordinate worldPosition = getWorldPosition(event);
				SC.say(I18nProvider.getSampleMessages().customControllerScreenCoordinates() + " = " + screenPosition
						+ "<br/>" + I18nProvider.getSampleMessages().customControllerWorldCoordinates() + " = "
						+ worldPosition);
			}
		};
		// Add the customController to the toolbar using a custom ToolbarModalAction button
		toolbar.addModalButton(new ToolbarModalAction("[ISOMORPHIC]/geomajas/widget/target.gif", I18nProvider
				.getSampleMessages().customToolbarToolsTooltip()) {

			@Override
			public void onSelect(ClickEvent event) {
				map.setController(customController);
			}

			@Override
			public void onDeselect(ClickEvent event) {
				// Deselecting restores the map's default (null) controller.
				map.setController(null);
			}
		});
		layout.addMember(toolbar);
		layout.addMember(map);
		return layout;
	}

	public String getDescription() {
		return I18nProvider.getSampleMessages().customToolbarToolsDescription();
	}

	public String getSourceFileName() {
		return "classpath:org/geomajas/example/gwt/client/samples/toolbar/CustomToolbarToolsSample.txt";
	}

	public String[] getConfigurationFiles() {
		return new String[] { "WEB-INF/layerOsm.xml",
				"WEB-INF/mapOsm.xml" };
	}

	public String ensureUserLoggedIn() {
		return "luc";
	}
}
| agpl-3.0 |
calogera/DataHubSystem | core/src/main/java/fr/gael/dhus/datastore/scanner/Scanner.java | 1659 | /*
* Data Hub Service (DHuS) - For Space data distribution.
* Copyright (C) 2013,2014,2015 GAEL Systems
*
* This file is part of DHuS software sources.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package fr.gael.dhus.datastore.scanner;
import java.util.List;
import fr.gael.drbx.cortex.DrbCortexItemClass;
/**
* Scanner aims to scan uri in order to retrieve data.
*/
public interface Scanner
{
   /**
    * Runs the scan over the configured location.
    *
    * @return number of products found, or -1 if not implemented
    * @throws InterruptedException if the user requested a stop.
    */
   int scan () throws InterruptedException;

   /**
    * Requests a running scan to stop.
    */
   void stop ();

   /**
    * @return true when the scanner has been stopped.
    */
   public boolean isStopped ();

   /**
    * @return the list of scanned entries; the list type suggests results may
    * be delivered asynchronously while the scan is running.
    */
   AsynchronousLinkedList<URLExt> getScanList();

   /**
    * Restricts the scan to the given item classes.
    */
   void setSupportedClasses (List<DrbCortexItemClass>supported);

   /**
    * Force the navigation through the scanned directories even if the
    * directory has been recognized.(default false)
    */
   public void setForceNavigate (boolean force);

   /**
    * @return whether forced navigation is enabled.
    */
   public boolean isForceNavigate();

   /**
    * Sets the pattern used to filter scanned entries.
    */
   public void setUserPattern (String pattern);
}
| agpl-3.0 |
olivermay/geomajas | face/geomajas-face-puregwt/client-impl/src/test/java/org/geomajas/puregwt/client/map/ViewPortMaxBoundsTest.java | 3544 | /*
* This is part of Geomajas, a GIS framework, http://www.geomajas.org/.
*
* Copyright 2008-2013 Geosparc nv, http://www.geosparc.com/, Belgium.
*
* The program is available in open source according to the GNU Affero
* General Public License. All contributions in this program are covered
* by the Geomajas Contributors License Agreement. For full licensing
* details, see LICENSE.txt in the project root.
*/
package org.geomajas.puregwt.client.map;
import javax.annotation.PostConstruct;
import junit.framework.Assert;
import org.geomajas.configuration.client.ClientMapInfo;
import org.geomajas.geometry.Bbox;
import org.geomajas.puregwt.client.GeomajasTestModule;
import org.geomajas.testdata.ReloadContext;
import org.geomajas.testdata.ReloadContextTestExecutionListener;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.TestExecutionListeners;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.support.DependencyInjectionTestExecutionListener;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.web.bindery.event.shared.EventBus;
@TestExecutionListeners(listeners = { ReloadContextTestExecutionListener.class,
DependencyInjectionTestExecutionListener.class })
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "/org/geomajas/spring/geomajasContext.xml", "viewPortContext.xml",
"mapViewPortBeans.xml", "mapBeansNoResolutions.xml", "layerViewPortBeans.xml" })
@ReloadContext
public class ViewPortMaxBoundsTest {
private static final Injector INJECTOR = Guice.createInjector(new GeomajasTestModule());
@Autowired
@Qualifier(value = "mapViewPortBeans")
private ClientMapInfo mapInfo;
private MapEventBus eventBus;
private ViewPort viewPort;
@PostConstruct
public void initialize() {
eventBus = new MapEventBusImpl(this, INJECTOR.getInstance(EventBus.class));
viewPort = INJECTOR.getInstance(ViewPort.class);
viewPort.initialize(mapInfo, eventBus);
viewPort.setMapSize(1000, 1000);
}
@Test
@ReloadContext
public void testInitialBounds() {
viewPort.initialize(mapInfo, eventBus);
Bbox maxBounds = viewPort.getMaximumBounds();
Assert.assertEquals(-100.0, maxBounds.getX());
Assert.assertEquals(-100.0, maxBounds.getY());
Assert.assertEquals(100.0, maxBounds.getMaxX());
Assert.assertEquals(100.0, maxBounds.getMaxY());
}
@Test
@ReloadContext
public void testSetMaxBounds() {
mapInfo.setMaxBounds(new org.geomajas.geometry.Bbox(0, 0, 10, 10));
viewPort.initialize(mapInfo, eventBus);
Bbox maxBounds = viewPort.getMaximumBounds();
Assert.assertEquals(0.0, maxBounds.getX());
Assert.assertEquals(0.0, maxBounds.getY());
Assert.assertEquals(10.0, maxBounds.getMaxX());
Assert.assertEquals(10.0, maxBounds.getMaxY());
}
@Test
@ReloadContext
public void testLayerUnion() {
mapInfo.setMaxBounds(org.geomajas.geometry.Bbox.ALL);
mapInfo.getLayers().get(0).setMaxExtent(new org.geomajas.geometry.Bbox(0, 0, 500, 500));
viewPort.initialize(mapInfo, eventBus);
Bbox maxBounds = viewPort.getMaximumBounds();
Assert.assertEquals(-100.0, maxBounds.getX());
Assert.assertEquals(-100.0, maxBounds.getY());
Assert.assertEquals(500.0, maxBounds.getMaxX());
Assert.assertEquals(500.0, maxBounds.getMaxY());
}
} | agpl-3.0 |
4ment/beast-mcmc | src/test/dr/evomodel/substmodel/AsymQuadTest.java | 7825 | package test.dr.evomodel.substmodel;
import junit.framework.TestCase;
import dr.inference.model.Parameter;
import dr.oldevomodel.substmodel.AsymmetricQuadraticModel;
import dr.evolution.datatype.Microsatellite;
/**
* @author Chieh-Hsi Wu
*
* Tests AsymmetricQuadraticModel exponentiation.
*/
public class AsymQuadTest extends TestCase {

    // One parameterisation of the model plus its precomputed expected
    // stationary distribution and transition probability matrix.
    // NOTE(review): the expected values are presumably reference results
    // produced externally — verify their provenance before changing them.
    interface Instance {
        public Microsatellite getDataType();

        public double getExpanConst();

        public double getExpanLin();

        public double getExpanQuad();

        public double getContractConst();

        public double getContractLin();

        public double getContractQuad();

        double getDistance();

        double[] getExpectedPi();

        public double[] getExpectedResult();
    }

    // Symmetric rates (expansion == contraction), no quadratic term.
    Instance test0 = new Instance() {
        public Microsatellite getDataType(){
            return new Microsatellite(1,4);
        }

        public double getExpanConst(){
            return 1.0;
        }

        public double getExpanLin(){
            return 5.0;
        }

        public double getExpanQuad(){
            return 0.0;
        }

        public double getContractConst(){
            return 1.0;
        }

        public double getContractLin(){
            return 5.0;
        }

        public double getContractQuad(){
            return 0.0;
        }

        public double getDistance() {
            return 0.1;
        }

        public double[] getExpectedPi() {
            return new double[]{
                    0.757532281205165, 0.126255380200861, 0.068866571018651, 0.047345767575323
            };
        }

        public double[] getExpectedResult() {
            return new double[]{
                    0.979555040783480,   0.019216583311851,   0.001139116232520,   0.000089259672149,
                    0.115299499871107,   0.780702902910835,   0.092806213742576,   0.011191383475483,
                    0.012530278557716,   0.170144725194722,   0.654730453041978,   0.162594543205584,
                    0.001428154754382,   0.029843689267955,   0.236501153753577,   0.732227002224086
            };
        }
    };

    // Asymmetric linear rates, no quadratic term.
    Instance test1 = new Instance() {
        public Microsatellite getDataType(){
            return new Microsatellite(1,4);
        }

        public double getExpanConst(){
            return 1.0;
        }

        public double getExpanLin(){
            return 5.0;
        }

        public double getExpanQuad(){
            return 0.0;
        }

        public double getContractConst(){
            return 2.0;
        }

        public double getContractLin(){
            return 3.0;
        }

        public double getContractQuad(){
            return 0.0;
        }

        public double getDistance() {
            return 0.2;
        }

        public double[] getExpectedPi() {
            return new double[]{
                    0.666666666666667, 0.133333333333333, 0.100000000000000, 0.100000000000000
            };
        }

        public double[] getExpectedResult() {
            return new double[]{
                    0.965025560544615,   0.031394424214122,   0.003139726429413,   0.000440288811850,
                    0.156972121070610,   0.676199129838700,   0.136694646485644,   0.030134102605046,
                    0.020931509529421,   0.182259528647526,   0.546569017275914,   0.250239944547139,
                    0.002935258745666,   0.040178803473394,   0.250239944547139,   0.706645993233800
            };
        }
    };

    // Fully asymmetric rates including quadratic terms.
    Instance test2 = new Instance() {
        public Microsatellite getDataType(){
            return new Microsatellite(1,4);
        }

        public double getExpanConst(){
            return 1.0;
        }

        public double getExpanLin(){
            return 5.0;
        }

        public double getExpanQuad(){
            return 3.0;
        }

        public double getContractConst(){
            return 2.0;
        }

        public double getContractLin(){
            return 3.0;
        }

        public double getContractQuad(){
            return 5.0;
        }

        public double getDistance() {
            return 0.3;
        }

        public double[] getExpectedPi() {
            return new double[]{
                    0.873099838521076, 0.087309983852108, 0.028063923381035, 0.011526254245782
            };
        }

        public double[] getExpectedResult() {
            return new double[]{
                    0.951679358560076,   0.039846739718488,   0.006618862899417,   0.001855038822019,
                    0.398467397184877,   0.419766204810971,   0.131559686528619,   0.050206711475533,
                    0.205920179092971,   0.409296802533481,   0.257041454224293,   0.127741564149256,
                    0.140516950382903,   0.380309775814669,   0.311022938798188,   0.168150335004240
            };
        }
    };

    Instance[] all = {test0, test1, test2};

    public void testAsymmetricQuadraticModel() {
        for (Instance test : all) {
            Parameter expanConst = new Parameter.Default(1,test.getExpanConst());
            Parameter expanLin = new Parameter.Default(1, test.getExpanLin());
            Parameter expanQuad = new Parameter.Default(1, test.getExpanQuad());
            Parameter contractConst = new Parameter.Default(1,test.getContractConst());
            Parameter contractLin = new Parameter.Default(1, test.getContractLin());
            Parameter contractQuad = new Parameter.Default(1, test.getContractQuad());
            Microsatellite microsat = test.getDataType();

            AsymmetricQuadraticModel aqm = new AsymmetricQuadraticModel(microsat,null,
                    expanConst, expanLin, expanQuad, contractConst, contractLin, contractQuad, false);

            // 1) The stationary distribution must match the reference values.
            aqm.computeStationaryDistribution();
            double[] statDist = aqm.getStationaryDistribution();
            final double[] expectedStatDist = test.getExpectedPi();
            for (int k = 0; k < statDist.length; ++k) {
                assertEquals(statDist[k], expectedStatDist[k], 1e-10);
            }

            // 2) The full transition probability matrix (row-major, 4x4).
            double[] mat = new double[4*4];
            aqm.getTransitionProbabilities(test.getDistance(), mat);
            final double[] result = test.getExpectedResult();
            int k;
            for (k = 0; k < mat.length; ++k) {
                assertEquals(result[k], mat[k], 1e-10);
                // System.out.print(" " + (mat[k] - result[k]));
            }

            // 3) Per-entry accessor must agree with the full matrix.
            k = 0;
            for(int i = 0; i < microsat.getStateCount(); i ++){
                for(int j = 0; j < microsat.getStateCount(); j ++){
                    assertEquals(result[k++], aqm.getOneTransitionProbabilityEntry(test.getDistance(), i , j), 1e-10);
                }
            }

            // 4) Column-wise accessor must agree with the full matrix.
            for(int j = 0; j < microsat.getStateCount();j ++){
                double[] colTransitionProb = aqm.getColTransitionProbabilities(test.getDistance(), j);
                for(int i =0 ; i < microsat.getStateCount(); i++){
                    assertEquals(result[i*microsat.getStateCount()+j], colTransitionProb[i], 1e-10);
                }
            }

            // 5) Row-wise accessor must agree with the full matrix.
            for(int i = 0; i < microsat.getStateCount();i ++){
                double[] rowTransitionProb = aqm.getRowTransitionProbabilities(test.getDistance(), i);
                for(int j =0 ; j < microsat.getStateCount(); j++){
                    assertEquals(result[i*microsat.getStateCount()+j], rowTransitionProb[j], 1e-10);
                }
            }
        }
    }
}
| lgpl-2.1 |
ACS-Community/ACS | LGPL/CommonSoftware/acsGUIs/objexp/src/si/ijs/acs/objectexplorer/engine/BACI/converters/BACIDoubleLinearConverter.java | 1992 |
package si.ijs.acs.objectexplorer.engine.BACI.converters;
import si.ijs.acs.objectexplorer.engine.BACI.BACIConverterSupport;
import si.ijs.acs.objectexplorer.engine.DataType;
import si.ijs.acs.objectexplorer.engine.BACI.BACIDataType;
/**
* Implementation of BACI double property linear converter, <pre>cx + n</pre>.
*/
public class BACIDoubleLinearConverter extends BACIConverterSupport {

	/**
	 * Coeficient (the <code>c</code> in <code>cx + n</code>).
	 */
	protected double c;

	/**
	 * Offset (the <code>n</code> in <code>cx + n</code>).
	 */
	protected double n;

	/**
	 * Constructor
	 * @param c coeficient.
	 * @param n offset.
	 */
	public BACIDoubleLinearConverter(double c, double n) {
		super();
		this.c = c;
		this.n = n;
	}

	/**
	 * Applies the linear transformation <code>c * value + n</code>.
	 * @see si.ijs.acs.objectexplorer.engine.BACI.BACIConverterSupport#convertPropertyValue(java.lang.Object)
	 */
	public Object convertPropertyValue(Object value) {
		// Double.valueOf is preferred over the deprecated new Double(...) ctor.
		return Double.valueOf(c * ((Double)value).doubleValue() + n);
	}

	/**
	 * Applies the inverse transformation <code>(value - n) / c</code>.
	 * Note: with a zero coefficient this yields an infinite/NaN result.
	 * @see si.ijs.acs.objectexplorer.engine.BACI.BACIConverterSupport#inverseConvertPropertyValue(java.lang.Object)
	 */
	public Object inverseConvertPropertyValue(Object value) {
		return Double.valueOf((((Double)value).doubleValue() - n) / c);
	}

	/**
	 * Builds a human readable unit string of the form "c*units+n", omitting
	 * the coefficient when it is zero and the offset when it is zero.
	 * @see si.ijs.acs.objectexplorer.engine.BACI.BACIConverterSupport#getConvertedPropertyValueUnits(String)
	 */
	public String getConvertedPropertyValueUnits(String units) {
		StringBuffer retVal = new StringBuffer(64);
		if (c != 0.0)
		{
			retVal.append(String.valueOf(c));
			retVal.append('*');
		}
		retVal.append(units);
		if (n != 0.0)
		{
			retVal.append('+');
			retVal.append(String.valueOf(n));
		}
		return retVal.toString();
	}

	/**
	 * @see si.ijs.acs.objectexplorer.engine.Converter#getDescription()
	 */
	public String getDescription() {
		return "Double BACI property linear converter.";
	}

	/**
	 * @see si.ijs.acs.objectexplorer.engine.BACI.BACIConverterSupport#getInverseConvertPropertyParameterType()
	 */
	public DataType getInverseConvertPropertyParameterType() {
		return new BACIDataType(double.class);
	}
}
| lgpl-2.1 |
Alfresco/community-edition | projects/remote-api/source/test-java/org/alfresco/rest/workflow/api/tests/ListParser.java | 2229 | /*
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.workflow.api.tests;
import static org.junit.Assert.assertNotNull;
import java.util.ArrayList;
import java.util.List;
import org.alfresco.rest.api.tests.client.PublicApiClient.ExpectedPaging;
import org.alfresco.rest.api.tests.client.PublicApiClient.ListResponse;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
/**
 * Parses a paged JSON "list" response into a typed {@link ListResponse}.
 * Subclasses supply the conversion for a single "entry" object.
 */
public abstract class ListParser<T>
{
    public ListResponse<T> parseList(JSONObject jsonResponse)
    {
        JSONObject jsonList = (JSONObject) jsonResponse.get("list");
        assertNotNull(jsonList);

        JSONArray jsonEntries = (JSONArray) jsonList.get("entries");
        assertNotNull(jsonEntries);

        List<T> parsed = new ArrayList<T>();
        for (Object element : jsonEntries)
        {
            JSONObject wrapper = (JSONObject) element;
            parsed.add(parseEntry((JSONObject) wrapper.get("entry")));
        }

        // Pagination metadata lives on the "list" object itself.
        return new ListResponse<T>(ExpectedPaging.parsePagination(jsonList), parsed);
    }

    /** Converts a single JSON "entry" object into the target type. */
    public abstract T parseEntry(JSONObject entry);
}
| lgpl-3.0 |
Alfresco/community-edition | projects/web-client/source/java/org/alfresco/web/app/servlet/command/BaseUIActionCommand.java | 1454 | /*
* #%L
* Alfresco Repository WAR Community
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.web.app.servlet.command;
import java.util.Map;
import org.alfresco.service.ServiceRegistry;
/**
* @author Kevin Roast
*/
public abstract class BaseUIActionCommand implements Command
{
   // Well-known property keys under which subclasses expect the servlet
   // context, request and response objects to be supplied.
   public static final String PROP_SERVLETCONTEXT = "ServletContext";
   public static final String PROP_REQUEST = "Request";
   public static final String PROP_RESPONSE = "Response";
}
| lgpl-3.0 |
hardvain/jllvm | src/org/jllvm/LLVMPhiNode.java | 1612 | package org.jllvm;
import org.jllvm.bindings.Core;
import org.jllvm.LLVMInstruction;
import org.jllvm.bindings.SWIGTYPE_p_LLVMOpaqueValue;
import org.jllvm.bindings.SWIGTYPE_p_p_LLVMOpaqueValue;
import org.jllvm.bindings.SWIGTYPE_p_LLVMOpaqueBasicBlock;
import org.jllvm.bindings.SWIGTYPE_p_p_LLVMOpaqueBasicBlock;
/**
 * Wraps an LLVM phi instruction built through the SWIG-generated C bindings.
 */
public class LLVMPhiNode extends LLVMInstruction {
	public LLVMPhiNode(LLVMInstructionBuilder builder,LLVMType type,String name) {
		instance = Core.LLVMBuildPhi(builder.getInstance(),type.getInstance(),name);
	}

	/**
	 * Adds incoming (value, predecessor block) pairs to this phi node.
	 * The two arrays must have equal length.
	 *
	 * The temporary native arrays are now released in finally blocks so they
	 * are not leaked if any binding call throws (the original deleted them
	 * only on the success path).
	 */
	public void addIncoming(LLVMValue[] values,LLVMBasicBlock[] blocks) {
		assert(values.length == blocks.length);
		final int count = values.length;
		SWIGTYPE_p_p_LLVMOpaqueBasicBlock blockArray = Core.new_LLVMBasicBlockRefArray(count);
		try {
			SWIGTYPE_p_p_LLVMOpaqueValue valueArray = Core.new_LLVMValueRefArray(count);
			try {
				// Fill both native arrays in a single pass.
				for(int i=0;i<count;i++) {
					Core.LLVMBasicBlockRefArray_setitem(blockArray,i,blocks[i].getBBInstance());
					Core.LLVMValueRefArray_setitem(valueArray,i,values[i].getInstance());
				}
				Core.LLVMAddIncoming(instance,valueArray,blockArray,count);
			} finally {
				Core.delete_LLVMValueRefArray(valueArray);
			}
		} finally {
			Core.delete_LLVMBasicBlockRefArray(blockArray);
		}
	}

	/** @return the number of incoming (value, block) pairs. */
	public long countIncoming() {
		return Core.LLVMCountIncoming(instance);
	}

	/** @return the incoming value at the given index (must be >= 0). */
	public LLVMValue getIncomingValue(long index) {
		assert(index >= 0);
		return LLVMValue.getValue(Core.LLVMGetIncomingValue(instance,index));
	}

	/** @return the incoming predecessor block at the given index (must be >= 0). */
	public LLVMBasicBlock getIncomingBlock(long index) {
		assert(index >= 0);
		return LLVMBasicBlock.getBasicBlock(Core.LLVMGetIncomingBlock(instance,index));
	}
}
| lgpl-3.0 |
moriartyy/elasticsearch160 | src/main/java/org/elasticsearch/action/support/single/shard/TransportShardSingleOperationAction.java | 14754 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.support.single.shard;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.NoShardAvailableActionException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.TransportAction;
import org.elasticsearch.action.support.TransportActions;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.*;
import java.io.IOException;
import static org.elasticsearch.action.support.TransportActions.isShardNotAvailableException;
/**
* A base class for single shard read operations.
*/
public abstract class TransportShardSingleOperationAction<Request extends SingleShardOperationRequest, Response extends ActionResponse> extends TransportAction<Request, Response> {

    protected final ClusterService clusterService;

    protected final TransportService transportService;

    // Name of the shard-level transport action; derived from the main action name
    // with a "[s]" suffix and registered in the constructor below.
    final String transportShardAction;

    // Thread pool name (from executor()) on which the shard operation is run.
    final String executor;

    protected TransportShardSingleOperationAction(Settings settings, String actionName, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters) {
        super(settings, actionName, threadPool, actionFilters);
        this.clusterService = clusterService;
        this.transportService = transportService;
        this.transportShardAction = actionName + "[s]";
        this.executor = executor();
        // Sub-actions are executed locally by their parent action, so they only
        // need the shard-level handler (see isSubAction()).
        if (!isSubAction()) {
            transportService.registerHandler(actionName, new TransportHandler());
        }
        transportService.registerHandler(transportShardAction, new ShardTransportHandler());
    }

    /**
     * Tells whether the action is a main one or a subaction. Used to decide whether we need to register
     * the main transport handler. In fact if the action is a subaction, its execute method
     * will be called locally to its parent action.
     */
    protected boolean isSubAction() {
        return false;
    }

    @Override
    protected void doExecute(Request request, ActionListener<Response> listener) {
        // All resolution, block checking, and shard-failover logic lives in AsyncSingleAction.
        new AsyncSingleAction(request, listener).start();
    }

    /** Name of the thread pool on which {@link #shardOperation} is executed. */
    protected abstract String executor();

    /** Executes the actual read on the node that hosts the target shard. */
    protected abstract Response shardOperation(Request request, ShardId shardId) throws ElasticsearchException;

    protected abstract Request newRequest();

    protected abstract Response newResponse();

    /**
     * Whether the request's index should be resolved to a concrete index via the
     * cluster metadata before routing (see the AsyncSingleAction constructor).
     */
    protected abstract boolean resolveIndex();

    // Cluster-level (global) READ block check, performed before index resolution.
    protected ClusterBlockException checkGlobalBlock(ClusterState state) {
        return state.blocks().globalBlockedException(ClusterBlockLevel.READ);
    }

    // Index-level READ block check, using the already-resolved concrete index.
    protected ClusterBlockException checkRequestBlock(ClusterState state, InternalRequest request) {
        return state.blocks().indexBlockedException(ClusterBlockLevel.READ, request.concreteIndex());
    }

    /**
     * Hook for subclasses to amend the request once the concrete index is known.
     * Default: no-op.
     */
    protected void resolveRequest(ClusterState state, InternalRequest request) {
    }

    /** Returns an iterator over the shard copies this request may be executed on. */
    protected abstract ShardIterator shards(ClusterState state, InternalRequest request) throws ElasticsearchException;

    /**
     * Drives one request to completion: resolves the index, checks cluster and
     * index blocks, then tries one shard copy after another (failing over via
     * onFailure -> perform) until a copy succeeds or the iterator is exhausted.
     */
    class AsyncSingleAction {

        private final ActionListener<Response> listener;

        private final ShardIterator shardIt;

        private final InternalRequest internalRequest;

        private final DiscoveryNodes nodes;

        // Most recent failure worth reporting; may be written from whichever
        // thread the failing attempt ran on, hence volatile.
        private volatile Throwable lastFailure;

        private AsyncSingleAction(Request request, ActionListener<Response> listener) {
            this.listener = listener;
            ClusterState clusterState = clusterService.state();
            if (logger.isTraceEnabled()) {
                logger.trace("executing [{}] based on cluster state version [{}]", request, clusterState.version());
            }
            nodes = clusterState.nodes();
            ClusterBlockException blockException = checkGlobalBlock(clusterState);
            if (blockException != null) {
                throw blockException;
            }
            // Optionally resolve the request index (e.g. through metadata) to a
            // single concrete index; otherwise use the index as given.
            String concreteSingleIndex;
            if (resolveIndex()) {
                concreteSingleIndex = clusterState.metaData().concreteSingleIndex(request.index(), request.indicesOptions());
            } else {
                concreteSingleIndex = request.index();
            }
            this.internalRequest = new InternalRequest(request, concreteSingleIndex);
            // Let the subclass adjust the request now that the concrete index is
            // known, then re-check blocks at the index level.
            resolveRequest(clusterState, internalRequest);
            blockException = checkRequestBlock(clusterState, internalRequest);
            if (blockException != null) {
                throw blockException;
            }
            this.shardIt = shards(clusterState, internalRequest);
        }

        public void start() {
            // null = no failure yet; perform() will pick the first shard copy.
            perform(null);
        }

        private void onFailure(ShardRouting shardRouting, Throwable e) {
            if (logger.isTraceEnabled() && e != null) {
                logger.trace("{}: failed to execute [{}]", e, shardRouting, internalRequest.request());
            }
            // Retry on the next shard copy, remembering this failure.
            perform(e);
        }

        private void perform(@Nullable final Throwable currentFailure) {
            Throwable lastFailure = this.lastFailure;
            // Record the new failure if none is remembered yet; read-override
            // exceptions always replace the remembered failure.
            if (lastFailure == null || TransportActions.isReadOverrideException(currentFailure)) {
                lastFailure = currentFailure;
                this.lastFailure = currentFailure;
            }
            final ShardRouting shardRouting = shardIt.nextOrNull();
            if (shardRouting == null) {
                // No copies left to try: report the most meaningful failure.
                Throwable failure = lastFailure;
                if (failure == null || isShardNotAvailableException(failure)) {
                    failure = new NoShardAvailableActionException(shardIt.shardId(), null, failure);
                } else {
                    if (logger.isDebugEnabled()) {
                        logger.debug("{}: failed to execute [{}]", failure, shardIt.shardId(), internalRequest.request());
                    }
                }
                listener.onFailure(failure);
                return;
            }
            if (shardRouting.currentNodeId().equals(nodes.localNodeId())) {
                // Copy lives on this node: execute locally, forked onto the
                // configured executor when the request asks for it, inline otherwise.
                if (logger.isTraceEnabled()) {
                    logger.trace("executing [{}] on shard [{}]", internalRequest.request(), shardRouting.shardId());
                }
                try {
                    if (internalRequest.request().operationThreaded()) {
                        threadPool.executor(executor).execute(new Runnable() {
                            @Override
                            public void run() {
                                try {
                                    Response response = shardOperation(internalRequest.request(), shardRouting.shardId());
                                    listener.onResponse(response);
                                } catch (Throwable e) {
                                    onFailure(shardRouting, e);
                                }
                            }
                        });
                    } else {
                        final Response response = shardOperation(internalRequest.request(), shardRouting.shardId());
                        listener.onResponse(response);
                    }
                } catch (Throwable e) {
                    onFailure(shardRouting, e);
                }
            } else {
                // Remote copy: forward to its node via the "[s]" shard-level action.
                DiscoveryNode node = nodes.get(shardRouting.currentNodeId());
                if (node == null) {
                    // Routing table points at a node we no longer know about.
                    onFailure(shardRouting, new NoShardAvailableActionException(shardIt.shardId()));
                } else {
                    transportService.sendRequest(node, transportShardAction, new ShardSingleOperationRequest(internalRequest.request(), shardRouting.shardId()), new BaseTransportResponseHandler<Response>() {

                        @Override
                        public Response newInstance() {
                            return newResponse();
                        }

                        @Override
                        public String executor() {
                            // Response handling is trivial; stay on the transport thread.
                            return ThreadPool.Names.SAME;
                        }

                        @Override
                        public void handleResponse(final Response response) {
                            listener.onResponse(response);
                        }

                        @Override
                        public void handleException(TransportException exp) {
                            onFailure(shardRouting, exp);
                        }
                    });
                }
            }
        }
    }

    /** Handles the main (node-level) action: delegates straight to execute(...). */
    private class TransportHandler extends BaseTransportRequestHandler<Request> {

        @Override
        public Request newInstance() {
            return newRequest();
        }

        @Override
        public String executor() {
            // Actual work is forked by AsyncSingleAction; stay on the transport thread.
            return ThreadPool.Names.SAME;
        }

        @Override
        public void messageReceived(Request request, final TransportChannel channel) throws Exception {
            // no need to have a threaded listener since we just send back a response
            request.listenerThreaded(false);
            // if we have a local operation, execute it on a thread since we don't spawn
            request.operationThreaded(true);
            execute(request, new ActionListener<Response>() {
                @Override
                public void onResponse(Response result) {
                    try {
                        channel.sendResponse(result);
                    } catch (Throwable e) {
                        onFailure(e);
                    }
                }

                @Override
                public void onFailure(Throwable e) {
                    try {
                        channel.sendResponse(e);
                    } catch (Exception e1) {
                        logger.warn("failed to send response for get", e1);
                    }
                }
            });
        }
    }

    /** Handles the shard-level ("[s]") action on the node hosting the shard. */
    private class ShardTransportHandler extends BaseTransportRequestHandler<ShardSingleOperationRequest> {

        @Override
        public ShardSingleOperationRequest newInstance() {
            return new ShardSingleOperationRequest();
        }

        @Override
        public String executor() {
            // Run the shard operation on the action's configured thread pool.
            return executor;
        }

        @Override
        public void messageReceived(final ShardSingleOperationRequest request, final TransportChannel channel) throws Exception {
            if (logger.isTraceEnabled()) {
                logger.trace("executing [{}] on shard [{}]", request.request(), request.shardId());
            }
            Response response = shardOperation(request.request(), request.shardId());
            channel.sendResponse(response);
        }
    }

    /**
     * Wire format for the shard-level action: the original request plus the target
     * shard id. Serialization is version-aware: nodes before 1.4.0.Beta1 exchange
     * the concrete index plus a shard number instead of a full ShardId.
     */
    class ShardSingleOperationRequest extends TransportRequest implements IndicesRequest {

        private Request request;

        private ShardId shardId;

        // No-arg constructor used for deserialization via readFrom(StreamInput).
        ShardSingleOperationRequest() {
        }

        ShardSingleOperationRequest(Request request, ShardId shardId) {
            super(request);
            this.request = request;
            this.shardId = shardId;
        }

        public Request request() {
            return request;
        }

        public ShardId shardId() {
            return shardId;
        }

        @Override
        public String[] indices() {
            return request.indices();
        }

        @Override
        public IndicesOptions indicesOptions() {
            return request.indicesOptions();
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            request = newRequest();
            request.readFrom(in);
            if (in.getVersion().onOrAfter(Version.V_1_4_0_Beta1)) {
                shardId = ShardId.readShardId(in);
            } else {
                //older nodes will send the concrete index as part of the request
                shardId = new ShardId(request.index(), in.readVInt());
            }
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            if (out.getVersion().before(Version.V_1_4_0_Beta1)) {
                //older nodes expect the concrete index as part of the request
                request.index(shardId.getIndex());
            }
            request.writeTo(out);
            if (out.getVersion().onOrAfter(Version.V_1_4_0_Beta1)) {
                shardId.writeTo(out);
            } else {
                out.writeVInt(shardId.id());
            }
        }
    }

    /**
     * Internal request class that gets built on each node. Holds the original request plus additional info.
     */
    protected class InternalRequest {

        final Request request;

        // Index name after optional resolution (see resolveIndex()).
        final String concreteIndex;

        InternalRequest(Request request, String concreteIndex) {
            this.request = request;
            this.concreteIndex = concreteIndex;
        }

        public Request request() {
            return request;
        }

        public String concreteIndex() {
            return concreteIndex;
        }
    }
}
| apache-2.0 |
mrluo735/cas-5.1.0 | support/cas-server-support-pm/src/main/java/org/apereo/cas/pm/rest/RestPasswordManagementService.java | 4240 | package org.apereo.cas.pm.rest;
import org.apache.commons.lang3.StringUtils;
import org.apereo.cas.CipherExecutor;
import org.apereo.cas.authentication.Credential;
import org.apereo.cas.authentication.UsernamePasswordCredential;
import org.apereo.cas.configuration.model.support.pm.PasswordManagementProperties;
import org.apereo.cas.pm.BasePasswordManagementService;
import org.apereo.cas.pm.PasswordChangeBean;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.client.RestTemplate;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Map;
/**
* This is {@link RestPasswordManagementService}.
*
* @author Misagh Moayyed
* @since 5.1.0
*/
public class RestPasswordManagementService extends BasePasswordManagementService {

    private final RestTemplate restTemplate;

    /**
     * Builds a password-management service backed by REST endpoints configured in
     * {@link PasswordManagementProperties.Rest}.
     *
     * @param cipherExecutor               cipher used by the base service for token handling
     * @param issuer                       token issuer
     * @param restTemplate                 template used to call the configured endpoints
     * @param passwordManagementProperties settings holding the REST endpoint URLs
     */
    public RestPasswordManagementService(final CipherExecutor<Serializable, String> cipherExecutor,
                                         final String issuer,
                                         final RestTemplate restTemplate,
                                         final PasswordManagementProperties passwordManagementProperties) {
        super(cipherExecutor, issuer, passwordManagementProperties);
        this.restTemplate = restTemplate;
    }

    /**
     * Changes the password by POSTing the credentials as headers to the configured
     * change endpoint.
     *
     * @param c    current credential; expected to be a {@link UsernamePasswordCredential}
     * @param bean bean carrying the new password
     * @return true when the endpoint returns HTTP 200 with a true body; false otherwise
     */
    @Override
    public boolean change(final Credential c, final PasswordChangeBean bean) {
        final PasswordManagementProperties.Rest rest = passwordManagementProperties.getRest();
        if (StringUtils.isBlank(rest.getEndpointUrlChange())) {
            return false;
        }
        final UsernamePasswordCredential upc = (UsernamePasswordCredential) c;
        final HttpHeaders headers = new HttpHeaders();
        headers.setAccept(Arrays.asList(MediaType.APPLICATION_JSON));
        headers.put("username", Arrays.asList(upc.getUsername()));
        headers.put("password", Arrays.asList(bean.getPassword()));
        headers.put("oldPassword", Arrays.asList(upc.getPassword()));
        final HttpEntity<String> entity = new HttpEntity<>(headers);
        // BUGFIX: previously this POSTed to the e-mail lookup endpoint even though the
        // change endpoint was the one validated above; use the change endpoint.
        final ResponseEntity<Boolean> result = restTemplate.exchange(rest.getEndpointUrlChange(), HttpMethod.POST, entity, Boolean.class);
        // Guard against a 200 with no body to avoid an NPE when unboxing (consistent
        // with the hasBody() checks in the other lookups below).
        if (result.getStatusCodeValue() == HttpStatus.OK.value() && result.hasBody()) {
            return Boolean.TRUE.equals(result.getBody());
        }
        return false;
    }

    /**
     * Looks up the e-mail address for a user via the configured e-mail endpoint.
     *
     * @param username the username, sent as a header
     * @return the e-mail address, or null when unconfigured or not found
     */
    @Override
    public String findEmail(final String username) {
        final PasswordManagementProperties.Rest rest = passwordManagementProperties.getRest();
        if (StringUtils.isBlank(rest.getEndpointUrlEmail())) {
            return null;
        }
        final HttpHeaders headers = new HttpHeaders();
        headers.setAccept(Arrays.asList(MediaType.APPLICATION_JSON));
        headers.put("username", Arrays.asList(username));
        final HttpEntity<String> entity = new HttpEntity<>(headers);
        final ResponseEntity<String> result = restTemplate.exchange(rest.getEndpointUrlEmail(), HttpMethod.GET, entity, String.class);
        if (result.getStatusCodeValue() == HttpStatus.OK.value() && result.hasBody()) {
            return result.getBody();
        }
        return null;
    }

    /**
     * Fetches the security questions (question -> answer map) for a user via the
     * configured security-questions endpoint.
     *
     * @param username the username, sent as a header
     * @return the question/answer map, or null when unconfigured or not found
     */
    @Override
    public Map<String, String> getSecurityQuestions(final String username) {
        final PasswordManagementProperties.Rest rest = passwordManagementProperties.getRest();
        if (StringUtils.isBlank(rest.getEndpointUrlSecurityQuestions())) {
            return null;
        }
        final HttpHeaders headers = new HttpHeaders();
        headers.setAccept(Arrays.asList(MediaType.APPLICATION_JSON));
        headers.put("username", Arrays.asList(username));
        final HttpEntity<String> entity = new HttpEntity<>(headers);
        final ResponseEntity<Map> result = restTemplate.exchange(rest.getEndpointUrlSecurityQuestions(),
                HttpMethod.GET, entity, Map.class);
        if (result.getStatusCodeValue() == HttpStatus.OK.value() && result.hasBody()) {
            return result.getBody();
        }
        return null;
    }
}
| apache-2.0 |
chtyim/cdap | cdap-examples/WordCount/src/main/java/co/cask/cdap/examples/wordcount/WordCounter.java | 2430 | /*
* Copyright © 2014 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package co.cask.cdap.examples.wordcount;
import co.cask.cdap.api.flow.AbstractFlow;
/**
* Flow that takes any arbitrary string of input and performs word statistics.
* <p>
* Flow parses input string into individual words, then performs per-word counts
* and other calculations like total number of words seen, average length
* of words seen, unique words seen, and also tracks the words most often
* associated with each other word.
* <p>
* The first Flowlet is the WordSplitter, which splits the sentence into
* individual words, cleans up non-alpha characters, and then sends the
* sentences to the WordAssociater and the words on to the WordCounter.
* <p>
* The next Flowlet is the WordAssociater that will track word associations
* between all of the words within the input string.
* <p>
* The next Flowlet is the Counter, which performs the necessary data
* operations to do the word count and count other word statistics.
* <p>
* The last Flowlet is the UniqueCounter, which calculates and updates the
* unique number of words seen.
*/
public class WordCounter extends AbstractFlow {

    // Supplies the stream and table names used to wire the flow below.
    private final WordCount.WordCountConfig config;

    public WordCounter(WordCount.WordCountConfig config) {
        this.config = config;
    }

    @Override
    protected void configure() {
        setName("WordCounter");
        setDescription("Example Word Count Flow");
        // Register the four flowlets, each backed by the table named in the config.
        addFlowlet("splitter", new WordSplitter(config.getWordStatsTable()));
        addFlowlet("associator", new WordAssociator(config.getWordAssocTable()));
        addFlowlet("counter", new Counter(config.getWordCountTable()));
        addFlowlet("unique", new UniqueCounter(config.getUniqueCountTable()));
        // Wiring: stream -> splitter -> {associator, counter}; counter -> unique.
        connectStream(config.getStream(), "splitter");
        connect("splitter", "associator");
        connect("splitter", "counter");
        connect("counter", "unique");
    }
}
| apache-2.0 |
allotria/intellij-community | java/java-tests/testSrc/com/intellij/java/codeInsight/daemon/quickFix/I18nQuickFixTest.java | 2342 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.java.codeInsight.daemon.quickFix;
import com.intellij.codeInsight.daemon.quickFix.LightQuickFixParameterizedTestCase;
import com.intellij.codeInspection.LocalInspectionTool;
import com.intellij.codeInspection.i18n.I18nInspection;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.impl.PsiDocumentManagerBase;
import org.jetbrains.annotations.NotNull;
/**
* @author yole
*/
public class I18nQuickFixTest extends LightQuickFixParameterizedTestCase {
    // Set per test file in beforeActionStarted(); read back by
    // shouldBeAvailableAfterExecution() after the quick fix has run.
    private boolean myMustBeAvailableAfterInvoke;

    @Override
    protected LocalInspectionTool @NotNull [] configureLocalInspectionTools() {
        // The quick fixes under test are the ones offered by the i18n inspection.
        return new LocalInspectionTool[]{new I18nInspection()};
    }

    @Override
    protected String getBasePath() {
        return "/codeInsight/daemonCodeAnalyzer/quickFix/i18n";
    }

    @Override
    protected void beforeActionStarted(final String testName, final String contents) {
        // Only the SystemCall.java case expects the intention to remain available
        // after it has been invoked.
        myMustBeAvailableAfterInvoke = Comparing.strEqual(testName, "SystemCall.java");
    }

    @Override
    protected void tearDown() throws Exception {
        // avoid "memory/disk conflict" when the document for changed annotation.xml stays in memory
        ((PsiDocumentManagerBase)PsiDocumentManager.getInstance(getProject())).clearUncommittedDocuments();
        super.tearDown();
    }

    @Override
    public void runSingle() throws Throwable {
        // Protect the community sources from accidental modification by the fix.
        VfsGuardian.guard(FileUtil.toSystemIndependentName(PathManager.getCommunityHomePath()), getTestRootDisposable());
        super.runSingle();
    }

    @Override
    protected boolean shouldBeAvailableAfterExecution() {
        return myMustBeAvailableAfterInvoke;
    }
}
| apache-2.0 |
Donnerbart/hazelcast | hazelcast/src/main/java/com/hazelcast/config/XmlElements.java | 3141 | /*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.config;
/**
 * The XML configuration elements recognised by Hazelcast, each paired with a
 * flag telling whether that element may appear more than once under its parent.
 */
enum XmlElements {
    HAZELCAST("hazelcast", false),
    INSTANCE_NAME("instance-name", false),
    IMPORT("import", true),
    CONFIG_REPLACERS("config-replacers", false),
    GROUP("group", false),
    LICENSE_KEY("license-key", false),
    MANAGEMENT_CENTER("management-center", false),
    PROPERTIES("properties", false),
    WAN_REPLICATION("wan-replication", true),
    NETWORK("network", false),
    PARTITION_GROUP("partition-group", false),
    EXECUTOR_SERVICE("executor-service", true),
    DURABLE_EXECUTOR_SERVICE("durable-executor-service", true),
    SCHEDULED_EXECUTOR_SERVICE("scheduled-executor-service", true),
    EVENT_JOURNAL("event-journal", true),
    MERKLE_TREE("merkle-tree", true),
    QUEUE("queue", true),
    MAP("map", true),
    CACHE("cache", true),
    MULTIMAP("multimap", true),
    REPLICATED_MAP("replicatedmap", true),
    LIST("list", true),
    SET("set", true),
    TOPIC("topic", true),
    RELIABLE_TOPIC("reliable-topic", true),
    JOB_TRACKER("jobtracker", true),
    SEMAPHORE("semaphore", true),
    LOCK("lock", true),
    RINGBUFFER("ringbuffer", true),
    ATOMIC_LONG("atomic-long", true),
    ATOMIC_REFERENCE("atomic-reference", true),
    COUNT_DOWN_LATCH("count-down-latch", true),
    LISTENERS("listeners", false),
    SERIALIZATION("serialization", false),
    SERVICES("services", false),
    SECURITY("security", false),
    MEMBER_ATTRIBUTES("member-attributes", false),
    NATIVE_MEMORY("native-memory", false),
    QUORUM("quorum", true),
    LITE_MEMBER("lite-member", false),
    HOT_RESTART_PERSISTENCE("hot-restart-persistence", false),
    USER_CODE_DEPLOYMENT("user-code-deployment", false),
    CARDINALITY_ESTIMATOR("cardinality-estimator", true),
    RELIABLE_ID_GENERATOR("reliable-id-generator", true),
    FLAKE_ID_GENERATOR("flake-id-generator", true),
    CRDT_REPLICATION("crdt-replication", false),
    PN_COUNTER("pn-counter", true),
    ;

    /** The element name as it appears in the XML document. */
    final String name;
    /** Whether the element may occur multiple times under its parent element. */
    final boolean multipleOccurrence;

    XmlElements(String elementName, boolean canRepeat) {
        this.name = elementName;
        this.multipleOccurrence = canRepeat;
    }

    /**
     * Looks up the element with the given XML name and reports whether it may
     * occur multiple times. Unknown names yield {@code false}.
     *
     * @param name the XML element name to look up (must not be null)
     * @return true when a matching element exists and may repeat
     */
    public static boolean canOccurMultipleTimes(String name) {
        XmlElements[] elements = values();
        for (XmlElements candidate : elements) {
            if (name.equals(candidate.name)) {
                return candidate.multipleOccurrence;
            }
        }
        return false;
    }

    /**
     * Checks whether this element's XML name matches the given name.
     *
     * @param name name to compare against; null yields false
     * @return true on an exact match
     */
    public boolean isEqual(String name) {
        return this.name.equals(name);
    }
}
| apache-2.0 |
impetus-opensource/Kundera | src/kundera-cassandra/cassandra-ds-driver/src/test/java/com/impetus/client/cassandra/udt/ProfessionalDetailsUDT.java | 3767 | /*******************************************************************************
* * Copyright 2015 Impetus Infotech.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
******************************************************************************/
/*
* author: karthikp.manchala
*/
package com.impetus.client.cassandra.udt;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.persistence.Column;
import javax.persistence.Embeddable;
/**
* The Class ProfessionalDetailsUDT.
*/
@Embeddable
public class ProfessionalDetailsUDT {

    /** Name of the employer. */
    @Column
    private String company;

    /** Phone extensions assigned to the employee. */
    @Column
    private List<Integer> extentions;

    /** Names of the employee's colleagues. */
    @Column
    private Set<String> colleagues;

    /** Projects keyed by project id. */
    @Column
    private Map<Integer, String> projects;

    /** Employee grade. */
    @Column
    private String grade;

    /** Monthly salary. */
    @Column(name = "monthly_salary")
    private Double monthlySalary;

    /** @return the employer name */
    public String getCompany() {
        return company;
    }

    /** @param company the employer name to set */
    public void setCompany(String company) {
        this.company = company;
    }

    /** @return the phone extensions */
    public List<Integer> getExtentions() {
        return extentions;
    }

    /** @param extentions the phone extensions to set */
    public void setExtentions(List<Integer> extentions) {
        this.extentions = extentions;
    }

    /** @return the colleague names */
    public Set<String> getColleagues() {
        return colleagues;
    }

    /** @param colleagues the colleague names to set */
    public void setColleagues(Set<String> colleagues) {
        this.colleagues = colleagues;
    }

    /** @return the projects keyed by id */
    public Map<Integer, String> getProjects() {
        return projects;
    }

    /** @param projects the projects keyed by id to set */
    public void setProjects(Map<Integer, String> projects) {
        this.projects = projects;
    }

    /** @return the employee grade */
    public String getGrade() {
        return grade;
    }

    /** @param grade the employee grade to set */
    public void setGrade(String grade) {
        this.grade = grade;
    }

    /** @return the monthly salary */
    public Double getMonthlySalary() {
        return monthlySalary;
    }

    /** @param monthlySalary the monthly salary to set */
    public void setMonthlySalary(Double monthlySalary) {
        this.monthlySalary = monthlySalary;
    }
}
| apache-2.0 |
zwets/flowable-engine | modules/flowable-engine-common/src/main/java/org/flowable/engine/common/impl/persistence/GenericManagerFactory.java | 1866 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.common.impl.persistence;
import org.flowable.engine.common.api.FlowableException;
import org.flowable.engine.common.impl.interceptor.CommandContext;
import org.flowable.engine.common.impl.interceptor.Session;
import org.flowable.engine.common.impl.interceptor.SessionFactory;
/**
* @author Tom Baeyens
* @author Joram Barrez
*/
public class GenericManagerFactory implements SessionFactory {

    // Session type under which the factory is registered.
    protected Class<? extends Session> typeClass;
    // Concrete class instantiated for each opened session; must have a no-arg constructor.
    protected Class<? extends Session> implementationClass;

    /**
     * Creates a factory that registers {@code implementationClass} under the
     * session type {@code typeClass}.
     *
     * @param typeClass           the session type key used for lookups
     * @param implementationClass the concrete session class to instantiate; must
     *                            expose a no-arg constructor
     */
    public GenericManagerFactory(Class<? extends Session> typeClass, Class<? extends Session> implementationClass) {
        this.typeClass = typeClass;
        this.implementationClass = implementationClass;
    }

    /** Convenience constructor registering the implementation under its own type. */
    public GenericManagerFactory(Class<? extends Session> implementationClass) {
        this(implementationClass, implementationClass);
    }

    @Override
    public Class<?> getSessionType() {
        return typeClass;
    }

    /**
     * Instantiates a fresh session via the implementation class's no-arg constructor.
     *
     * @throws FlowableException when instantiation fails for any reason (missing or
     *                           inaccessible constructor, or a constructor failure)
     */
    @Override
    public Session openSession(CommandContext commandContext) {
        try {
            // Class.newInstance() is deprecated and propagates checked exceptions
            // unchecked; go through the Constructor API instead.
            return implementationClass.getDeclaredConstructor().newInstance();
        } catch (Exception e) {
            throw new FlowableException("couldn't instantiate " + implementationClass.getName() + ": " + e.getMessage(), e);
        }
    }
}
| apache-2.0 |
nianma/fcrepo4 | fcrepo-integration-rdf/src/test/java/org/fcrepo/integration/rdf/AbstractIntegrationRdfIT.java | 6615 | /**
* Copyright 2015 DuraSpace, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fcrepo.integration.rdf;
import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.NodeFactory;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.sparql.graph.GraphFactory;
import com.hp.hpl.jena.update.GraphStore;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.BasicHttpEntity;
import org.apache.jena.riot.RDFDataMgr;
import org.apache.jena.riot.RDFLanguages;
import org.fcrepo.integration.http.api.AbstractResourceIT;
import javax.ws.rs.core.Response;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import static javax.ws.rs.core.Response.Status.CREATED;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
* @author cabeer
* @author ajs6f
*/
public abstract class AbstractIntegrationRdfIT extends AbstractResourceIT {

    /**
     * PUTs the given Turtle body at {@code serverAddress + pid}, asserts the
     * resource was created, then GETs it back (minimal container, server-managed
     * triples omitted) and asserts the returned graph is isomorphic with the
     * graph parsed from the request body.
     *
     * @param pid  path/slug for the new resource
     * @param body Turtle content to store
     * @return the PUT response
     */
    protected HttpResponse createLDPRSAndCheckResponse(final String pid, final String body) {
        try {
            final HttpPut httpPut = new HttpPut(serverAddress + pid);
            httpPut.addHeader("Slug", pid);
            httpPut.addHeader("Content-Type", "text/turtle");
            final BasicHttpEntity e = new BasicHttpEntity();
            e.setContent(IOUtils.toInputStream(body));
            httpPut.setEntity(e);
            final HttpResponse response = client.execute(httpPut);
            checkResponse(response, CREATED);
            final String location = response.getFirstHeader("Location").getValue();
            final HttpGet httpGet = new HttpGet(location);
            // Ask only for the minimal container representation, without
            // server-managed triples, so it can be compared to the request body.
            httpGet.addHeader("Prefer", "return=representation; " +
                    "include=\"http://www.w3.org/ns/ldp#PreferMinimalContainer\"; " +
                    "omit=\"http://fedora.info/definitions/v4/repository#ServerManaged\"");
            final GraphStore graphStore = getGraphStore(httpGet);
            assertFalse(graphStore.isEmpty());
            final Graph tidiedGraph = getTidiedGraph(graphStore);
            final Model expected = ModelFactory.createDefaultModel().read(IOUtils.toInputStream(body), location, "TTL");
            final boolean isomorphicWith = tidiedGraph.isIsomorphicWith(getTidiedGraph(expected.getGraph()));
            final String description;
            if (!isomorphicWith) {
                // Build a readable diff message showing both serializations.
                final ByteArrayOutputStream o = new ByteArrayOutputStream();
                final Model tidiedModel = ModelFactory.createModelForGraph(tidiedGraph);
                tidiedModel.setNsPrefixes(expected.getNsPrefixMap());
                o.write("Expected: ".getBytes());
                RDFDataMgr.write(o, expected, RDFLanguages.TTL);
                o.write("to be isomorphic with: ".getBytes());
                RDFDataMgr.write(o, tidiedModel, RDFLanguages.TTL);
                description = IOUtils.toString(o.toByteArray(), "UTF-8");
            } else {
                description = "";
            }
            assertTrue(description, isomorphicWith);
            return response;
        } catch (final IOException e) {
            // FIX: fail with the cause preserved instead of assertTrue(msg, false),
            // which discarded the stack trace of the underlying IOException.
            throw new AssertionError("Got IOException " + e, e);
        }
    }

    private static Graph getTidiedGraph(final GraphStore graphStore) {
        return getTidiedGraph(graphStore.getDefaultGraph());
    }

    /**
     * Normalizes a graph for isomorphism comparison: skolemized ".well-known"
     * URIs (on either subject or object position) are mapped back to blank
     * nodes, and xsd:string literals are reduced to plain literals.
     */
    private static Graph getTidiedGraph(final Graph graph) {
        final Graph betterGraph = GraphFactory.createDefaultGraph();
        final ExtendedIterator<Triple> triples = graph.find(Node.ANY, Node.ANY, Node.ANY);
        // Same ".well-known" URI must always map to the same blank node.
        final Map<Node, Node> bnodeMap = new HashMap<>();
        while (triples.hasNext()) {
            final Triple next = triples.next();
            Triple replacement = next;
            if (replacement.getSubject().toString().contains(".well-known")) {
                if (!bnodeMap.containsKey(replacement.getSubject())) {
                    bnodeMap.put(replacement.getSubject(), NodeFactory.createAnon());
                }
                replacement = new Triple(bnodeMap.get(replacement.getSubject()),
                        replacement.getPredicate(),
                        replacement.getObject());
            }
            if (replacement.getObject().toString().contains(".well-known")) {
                if (!bnodeMap.containsKey(replacement.getObject())) {
                    bnodeMap.put(replacement.getObject(), NodeFactory.createAnon());
                }
                replacement = new Triple(replacement.getSubject(),
                        replacement.getPredicate(),
                        bnodeMap.get(replacement.getObject()));
            }
            if (replacement.getObject().isLiteral()
                    && replacement.getObject().getLiteral().getDatatype() != null
                    && replacement.getObject().getLiteral().getDatatype().equals(XSDDatatype.XSDstring)) {
                // Drop the explicit xsd:string datatype so "x"^^xsd:string == "x".
                replacement = new Triple(replacement.getSubject(),
                        replacement.getPredicate(),
                        NodeFactory.createLiteral(replacement.getObject().getLiteral().getLexicalForm()));
            }
            betterGraph.add(replacement);
        }
        return betterGraph;
    }

    /**
     * Asserts the response carries the expected HTTP status.
     *
     * @param response the response to inspect
     * @param expected the expected status (not necessarily CREATED)
     */
    protected void checkResponse(final HttpResponse response, final Response.StatusType expected) {
        final int actual = response.getStatusLine().getStatusCode();
        // FIX: the old message hard-coded "CREATED" even though the expected
        // status is a parameter; report the actual expectation instead.
        assertEquals("Didn't get a " + expected.getStatusCode() + " response!", expected.getStatusCode(), actual);
    }

    /** Reads a classpath resource into a string (platform default charset). */
    protected String getContentFromClasspath(final String path) throws IOException {
        return IOUtils.toString(this.getClass().getResourceAsStream(path));
    }
}
| apache-2.0 |
prasi-in/geode | geode-core/src/test/java/org/apache/geode/cache/client/internal/SingleHopClientExecutorSubmitTaskWithExceptionTest.java | 2112 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache.client.internal;
import java.util.concurrent.TimeUnit;
import org.awaitility.Awaitility;
import org.apache.geode.test.junit.categories.UnitTest;
import org.junit.Rule;
import org.junit.Test;
import org.junit.contrib.java.lang.system.SystemErrRule;
import org.junit.experimental.categories.Category;
/**
 * Test if exceptions are logged when thread is submitted using
 * {@code SingleHopClientExecutor.submitTask} method.
 */
@Category(UnitTest.class)
public class SingleHopClientExecutorSubmitTaskWithExceptionTest {

  /** Captures everything written to System.err so the test can inspect it. */
  @Rule
  public SystemErrRule systemErrRule = new SystemErrRule().enableLog();

  /**
   * Refer: GEODE-2109. This test verifies that any exception thrown from a
   * forked thread is logged (and thus ends up on the captured System.err).
   */
  @Test
  public void submittedTaskShouldLogFailure() {
    String errorMsg = "I am expecting this to be logged";
    SingleHopClientExecutor.submitTask(new Runnable() {
      @Override
      public void run() {
        // test piece throwing exception
        throw new RuntimeException(errorMsg);
      }
    });
    /**
     * Sometimes need to wait for more than a second as thread execution takes time.
     * BUG FIX: the original lambda used a statement block that discarded the
     * boolean result of contains(), so Awaitility had no condition to poll and
     * the await was vacuous. Returning the boolean makes this a
     * Callable<Boolean> that is polled until the message appears in the log.
     */
    Awaitility.await("Waiting for exception").atMost(60L, TimeUnit.SECONDS)
        .until(() -> systemErrRule.getLog().contains(errorMsg));
  }
}
| apache-2.0 |
rpiotti/Web-Karma | karma-commands/commands-publish-alignment-openrdf/src/main/java/edu/isi/karma/controller/command/worksheet/FetchColumnCommandFactory.java | 2290 | /*******************************************************************************
* Copyright 2012 University of Southern California
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This code was developed by the Information Integration Group as part
* of the Karma project at the Information Sciences Institute of the
* University of Southern California. For more information, publications,
* and related projects, please see: http://www.isi.edu/integration
******************************************************************************/
package edu.isi.karma.controller.command.worksheet;
import javax.servlet.http.HttpServletRequest;
import edu.isi.karma.controller.command.Command;
import edu.isi.karma.controller.command.CommandFactory;
import edu.isi.karma.rep.Workspace;
/**
 * Builds {@link FetchColumnCommand} instances from the parameters of an
 * incoming HTTP request.
 */
public class FetchColumnCommandFactory extends CommandFactory {
	/** Names of the HTTP request parameters this factory reads. */
	public enum Arguments {
		worksheetId, alignmentNodeId, tripleStoreUrl,
		graphUrl, nodeId, selectionName
	}

	/** Reads the request parameter whose name matches the given argument. */
	private static String read(HttpServletRequest request, Arguments argument) {
		return request.getParameter(argument.name());
	}

	@Override
	public Command createCommand(HttpServletRequest request,
			Workspace workspace) {
		return new FetchColumnCommand(
				getNewId(workspace),
				read(request, Arguments.worksheetId),
				read(request, Arguments.alignmentNodeId),
				read(request, Arguments.tripleStoreUrl),
				read(request, Arguments.graphUrl),
				read(request, Arguments.nodeId),
				read(request, Arguments.selectionName));
	}

	@Override
	public Class<? extends Command> getCorrespondingCommand() {
		return FetchColumnCommand.class;
	}
}
| apache-2.0 |
yuyijq/dubbo | dubbo-remoting/dubbo-remoting-api/src/test/java/org/apache/dubbo/remoting/TransportersTest.java | 2140 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.remoting;
import org.apache.dubbo.common.URL;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
/**
 * Unit tests for the static {@code Transporters} facade: argument validation
 * (null URL/string, missing handlers) and successful bind/connect with the
 * mock transporter selected via the URL's {@code transporter} parameter.
 */
public class TransportersTest {
    // URL selects "mockTransporter" via extension parameter, so no real network I/O happens.
    private String url = "dubbo://127.0.0.1:12345?transporter=mockTransporter";
    // Mocked handler; tests only check that bind/connect accept it, not that it is invoked.
    private ChannelHandler channel = Mockito.mock(ChannelHandler.class);
    /**
     * bind() must reject null URLs and a URL without any handler, and must
     * return a non-null server when one or more handlers are supplied.
     */
    @Test
    public void testBind() throws RemotingException {
        Assertions.assertThrows(RuntimeException.class, () -> Transporters.bind((String) null));
        Assertions.assertThrows(RuntimeException.class, () -> Transporters.bind((URL) null));
        // bind without a handler is invalid (unlike connect below).
        Assertions.assertThrows(RuntimeException.class, () -> Transporters.bind(url));
        Assertions.assertNotNull(Transporters.bind(url, channel));
        Assertions.assertNotNull(Transporters.bind(url, channel, channel));
    }
    /**
     * connect() must reject null URLs, but succeeds with zero, one, or
     * multiple handlers (a handler-less client is permitted).
     */
    @Test
    public void testConnect() throws RemotingException {
        Assertions.assertThrows(RuntimeException.class, () -> Transporters.connect((String) null));
        Assertions.assertThrows(RuntimeException.class, () -> Transporters.connect((URL) null));
        Assertions.assertNotNull(Transporters.connect(url));
        Assertions.assertNotNull(Transporters.connect(url, channel));
        Assertions.assertNotNull(Transporters.connect(url, channel, channel));
    }
}
| apache-2.0 |
Vishwa1311/incubator-fineract | fineract-provider/src/main/java/org/apache/fineract/accounting/journalentry/api/JournalEntryJsonInputParams.java | 2198 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.accounting.journalentry.api;
import java.util.HashSet;
import java.util.Set;
/***
 * Enum of all parameters passed in while creating/updating a journal Entry.
 * Each constant maps to the JSON field name ({@code value}) used on the wire.
 ***/
public enum JournalEntryJsonInputParams {
    OFFICE_ID("officeId"), TRANSACTION_DATE("transactionDate"), COMMENTS("comments"), CREDITS("credits"), DEBITS("debits"), LOCALE("locale"), DATE_FORMAT(
            "dateFormat"), REFERENCE_NUMBER("referenceNumber"), USE_ACCOUNTING_RULE("useAccountingRule"), ACCOUNTING_RULE("accountingRule"), AMOUNT(
            "amount"), CURRENCY_CODE("currencyCode"), PAYMENT_TYPE_ID("paymentTypeId"), ACCOUNT_NUMBER("accountNumber"), CHECK_NUMBER(
            "checkNumber"), ROUTING_CODE("routingCode"), RECEIPT_NUMBER("receiptNumber"), BANK_NUMBER("bankNumber");

    // JSON field name corresponding to this parameter.
    private final String value;

    private JournalEntryJsonInputParams(final String value) {
        this.value = value;
    }

    // All JSON field names, collected once at class-initialization time.
    private static final Set<String> values = new HashSet<>();
    static {
        for (final JournalEntryJsonInputParams type : JournalEntryJsonInputParams.values()) {
            values.add(type.value);
        }
    }

    /**
     * Returns the set of every supported JSON field name.
     */
    public static Set<String> getAllValues() {
        return values;
    }

    /**
     * Returns a human-readable form of the constant name, e.g.
     * {@code DATE_FORMAT} becomes {@code "DATE FORMAT"}.
     */
    @Override
    public String toString() {
        // name() already returns a String (no redundant toString() needed), and
        // a literal char replace avoids the regex machinery of replaceAll.
        return name().replace('_', ' ');
    }

    /**
     * Returns the JSON field name for this parameter.
     */
    public String getValue() {
        return this.value;
    }
}
treasure-data/presto | presto-main/src/main/java/io/prestosql/sql/planner/optimizations/QueryCardinalityUtil.java | 7637 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.sql.planner.optimizations;
import com.google.common.collect.Range;
import io.prestosql.sql.planner.iterative.GroupReference;
import io.prestosql.sql.planner.iterative.Lookup;
import io.prestosql.sql.planner.plan.AggregationNode;
import io.prestosql.sql.planner.plan.EnforceSingleRowNode;
import io.prestosql.sql.planner.plan.ExchangeNode;
import io.prestosql.sql.planner.plan.FilterNode;
import io.prestosql.sql.planner.plan.LimitNode;
import io.prestosql.sql.planner.plan.OffsetNode;
import io.prestosql.sql.planner.plan.PlanNode;
import io.prestosql.sql.planner.plan.PlanVisitor;
import io.prestosql.sql.planner.plan.ProjectNode;
import io.prestosql.sql.planner.plan.TopNNode;
import io.prestosql.sql.planner.plan.ValuesNode;
import static com.google.common.collect.Iterables.getOnlyElement;
import static io.prestosql.sql.planner.iterative.Lookup.noLookup;
import static java.lang.Math.max;
import static java.lang.Math.min;
import static java.util.Objects.requireNonNull;
/**
 * Static helpers for reasoning about how many rows a plan subtree can
 * produce. Cardinality is expressed as a {@link Range} of possible row
 * counts; the {@code is*} predicates test that range against common bounds.
 */
public final class QueryCardinalityUtil
{
    private QueryCardinalityUtil() {}

    /** Returns true if the subtree produces exactly one row. */
    public static boolean isScalar(PlanNode node)
    {
        return isScalar(node, noLookup());
    }

    /** Returns true if the subtree produces exactly one row (resolving group references via lookup). */
    public static boolean isScalar(PlanNode node, Lookup lookup)
    {
        return Range.singleton(1L).encloses(extractCardinality(node, lookup));
    }

    /** Returns true if the subtree produces at most one row. */
    public static boolean isAtMostScalar(PlanNode node)
    {
        return isAtMostScalar(node, noLookup());
    }

    /** Returns true if the subtree produces at most one row (resolving group references via lookup). */
    public static boolean isAtMostScalar(PlanNode node, Lookup lookup)
    {
        return isAtMost(node, lookup, 1L);
    }

    /** Returns true if the subtree's possible row counts all fall within [0, maxCardinality]. */
    public static boolean isAtMost(PlanNode node, Lookup lookup, long maxCardinality)
    {
        return Range.closed(0L, maxCardinality).encloses(extractCardinality(node, lookup));
    }

    /** Returns true if the subtree always produces at least one row. */
    public static boolean isAtLeastScalar(PlanNode node, Lookup lookup)
    {
        return isAtLeast(node, lookup, 1L);
    }

    /** Returns true if the subtree's possible row counts are all >= minCardinality. */
    public static boolean isAtLeast(PlanNode node, Lookup lookup, long minCardinality)
    {
        return Range.atLeast(minCardinality).encloses(extractCardinality(node, lookup));
    }

    /** Returns true if the subtree provably produces no rows. */
    public static boolean isEmpty(PlanNode node, Lookup lookup)
    {
        return isAtMost(node, lookup, 0);
    }

    /** Computes the range of possible row counts for the subtree. */
    public static Range<Long> extractCardinality(PlanNode node)
    {
        return extractCardinality(node, noLookup());
    }

    /** Computes the range of possible row counts, resolving group references via lookup. */
    public static Range<Long> extractCardinality(PlanNode node, Lookup lookup)
    {
        return node.accept(new CardinalityExtractorPlanVisitor(lookup), null);
    }

    /**
     * Visitor that derives a conservative row-count range per node type.
     * Unknown node types fall back to {@code [0, +inf)} (visitPlan).
     */
    private static final class CardinalityExtractorPlanVisitor
            extends PlanVisitor<Range<Long>, Void>
    {
        private final Lookup lookup;

        public CardinalityExtractorPlanVisitor(Lookup lookup)
        {
            this.lookup = requireNonNull(lookup, "lookup is null");
        }

        /** Default: nothing is known, any non-negative row count is possible. */
        @Override
        protected Range<Long> visitPlan(PlanNode node, Void context)
        {
            return Range.atLeast(0L);
        }

        /** Delegate to the plan the group reference resolves to. */
        @Override
        public Range<Long> visitGroupReference(GroupReference node, Void context)
        {
            return lookup.resolve(node).accept(this, context);
        }

        /** EnforceSingleRow guarantees exactly one row (it fails at runtime otherwise). */
        @Override
        public Range<Long> visitEnforceSingleRow(EnforceSingleRowNode node, Void context)
        {
            return Range.singleton(1L);
        }

        @Override
        public Range<Long> visitAggregation(AggregationNode node, Void context)
        {
            if (node.hasEmptyGroupingSet() && node.getGroupingSetCount() == 1) {
                // only single default aggregation which will produce exactly single row
                return Range.singleton(1L);
            }
            Range<Long> sourceCardinalityRange = node.getSource().accept(this, null);
            long lower;
            // A default output (global aggregation row) or a non-empty source
            // guarantees at least one output row.
            if (node.hasDefaultOutput() || sourceCardinalityRange.lowerEndpoint() > 0) {
                lower = 1;
            }
            else {
                lower = 0;
            }
            if (sourceCardinalityRange.hasUpperBound()) {
                // Grouping cannot increase cardinality beyond the source's upper bound,
                // but the default-output row may push it up to `lower`.
                long upper = Math.max(lower, sourceCardinalityRange.upperEndpoint());
                return Range.closed(lower, upper);
            }
            return Range.atLeast(lower);
        }

        @Override
        public Range<Long> visitExchange(ExchangeNode node, Void context)
        {
            // A single-source exchange only moves rows; multi-source merges are unbounded here.
            if (node.getSources().size() == 1) {
                return getOnlyElement(node.getSources()).accept(this, null);
            }
            return Range.atLeast(0L);
        }

        /** Projection is row-preserving. */
        @Override
        public Range<Long> visitProject(ProjectNode node, Void context)
        {
            return node.getSource().accept(this, null);
        }

        /** A filter may drop any number of rows, so only the upper bound survives. */
        @Override
        public Range<Long> visitFilter(FilterNode node, Void context)
        {
            Range<Long> sourceCardinalityRange = node.getSource().accept(this, null);
            if (sourceCardinalityRange.hasUpperBound()) {
                return Range.closed(0L, sourceCardinalityRange.upperEndpoint());
            }
            return Range.atLeast(0L);
        }

        /** VALUES produces exactly its literal row count. */
        @Override
        public Range<Long> visitValues(ValuesNode node, Void context)
        {
            return Range.singleton((long) node.getRows().size());
        }

        @Override
        public Range<Long> visitOffset(OffsetNode node, Void context)
        {
            Range<Long> sourceCardinalityRange = node.getSource().accept(this, null);
            // OFFSET skips `count` rows; both endpoints shrink, clamped at zero.
            long lower = max(sourceCardinalityRange.lowerEndpoint() - node.getCount(), 0L);
            if (sourceCardinalityRange.hasUpperBound()) {
                return Range.closed(lower, max(sourceCardinalityRange.upperEndpoint() - node.getCount(), 0L));
            }
            else {
                return Range.atLeast(lower);
            }
        }

        @Override
        public Range<Long> visitLimit(LimitNode node, Void context)
        {
            if (node.isWithTies()) {
                // LIMIT ... WITH TIES may emit more than `count` rows (all ties are kept),
                // so only the lower bound can be tightened; the upper bound stays the source's.
                Range<Long> sourceCardinalityRange = node.getSource().accept(this, null);
                long lower = min(node.getCount(), sourceCardinalityRange.lowerEndpoint());
                if (sourceCardinalityRange.hasUpperBound()) {
                    return Range.closed(lower, sourceCardinalityRange.upperEndpoint());
                }
                else {
                    return Range.atLeast(lower);
                }
            }
            return applyLimit(node.getSource(), node.getCount());
        }

        /** TopN behaves like a plain LIMIT for cardinality purposes. */
        @Override
        public Range<Long> visitTopN(TopNNode node, Void context)
        {
            return applyLimit(node.getSource(), node.getCount());
        }

        /** Clamps the source's range to at most `limit` rows. */
        private Range<Long> applyLimit(PlanNode source, long limit)
        {
            Range<Long> sourceCardinalityRange = source.accept(this, null);
            if (sourceCardinalityRange.hasUpperBound()) {
                limit = min(sourceCardinalityRange.upperEndpoint(), limit);
            }
            long lower = min(limit, sourceCardinalityRange.lowerEndpoint());
            return Range.closed(lower, limit);
        }
    }
}
| apache-2.0 |
thirdy/TomP2P | core/src/main/java/net/tomp2p/connection/PeerException.java | 2320 | /*
* Copyright 2009 Thomas Bocek
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package net.tomp2p.connection;
import net.tomp2p.futures.FutureResponse;
/**
 * This exception is used internally and passed over to the method
 * exceptionCaught. A PeerException always carries an {@link AbortCause}
 * describing which side aborted the communication and why.
 *
 * @author Thomas Bocek
 */
public class PeerException extends Exception {
    private static final long serialVersionUID = 3710790196087629945L;

    /**
     * USER_ABORT means that this peer aborts the communication. PEER_ERROR
     * means that the other peer did not react as expected (e.g., no reply).
     * PEER_ABORT means that the other peer found an error on our side (e.g.,
     * if this peer thinks the other peer is someone else).
     *
     * @author Thomas Bocek
     */
    public enum AbortCause {
        USER_ABORT, PEER_ERROR, PEER_ABORT, TIMEOUT, SHUTDOWN, PROBABLY_OFFLINE
    }

    private final AbortCause abortCause;

    /**
     * Creates an exception with an explicit abort cause and a custom message.
     *
     * @param abortCause
     *            Either USER_ABORT, PEER_ERROR, PEER_ABORT, or TIMEOUT.
     * @param message
     *            Custom message.
     */
    public PeerException(final AbortCause abortCause, final String message) {
        super(message);
        this.abortCause = abortCause;
    }

    /**
     * Wraps a failed future; the abort cause defaults to PEER_ERROR and the
     * message is the future's failure reason.
     */
    public PeerException(FutureResponse future) {
        super(future.failedReason());
        this.abortCause = AbortCause.PEER_ERROR;
    }

    /**
     * Wraps an arbitrary throwable; the abort cause defaults to PEER_ERROR.
     */
    public PeerException(Throwable cause) {
        super(cause);
        this.abortCause = AbortCause.PEER_ERROR;
    }

    /**
     * @return The cause of the exception.
     */
    public AbortCause abortCause() {
        return abortCause;
    }

    @Override
    public String toString() {
        final StringBuilder text = new StringBuilder("PeerException (");
        text.append(abortCause.toString()).append("): ").append(getMessage());
        return text.toString();
    }
}
| apache-2.0 |
bysslord/hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/mover/TestMover.java | 11087 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.mover;
import java.io.IOException;
import java.net.URI;
import java.util.*;
import com.google.common.collect.Maps;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.MiniDFSNNTopology;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.server.balancer.Dispatcher.DBlock;
import org.apache.hadoop.hdfs.server.balancer.NameNodeConnector;
import org.apache.hadoop.hdfs.server.mover.Mover.MLocation;
import org.apache.hadoop.hdfs.server.namenode.ha.HATestUtil;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.ToolRunner;
import org.junit.Assert;
import org.junit.Test;
/**
 * Tests for the HDFS Mover tool: connector construction, block-move
 * scheduling, and command-line path parsing for single-NN, HA, federated,
 * and federated-HA configurations.
 */
public class TestMover {
  // Builds a Mover over the single namenode described by conf.
  // Asserts exactly one namenode is configured; a null path list means "move everything".
  static Mover newMover(Configuration conf) throws IOException {
    final Collection<URI> namenodes = DFSUtil.getNsServiceRpcUris(conf);
    Assert.assertEquals(1, namenodes.size());
    Map<URI, List<Path>> nnMap = Maps.newHashMap();
    for (URI nn : namenodes) {
      nnMap.put(nn, null);
    }
    final List<NameNodeConnector> nncs = NameNodeConnector.newNameNodeConnectors(
        nnMap, Mover.class.getSimpleName(), Mover.MOVER_ID_PATH, conf,
        NameNodeConnector.DEFAULT_MAX_IDLE_ITERATIONS);
    return new Mover(nncs.get(0), conf);
  }
  // Scheduling the same replica move twice must succeed once and be rejected the second time.
  @Test
  public void testScheduleSameBlock() throws IOException {
    final Configuration conf = new HdfsConfiguration();
    final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
        .numDataNodes(4).build();
    try {
      cluster.waitActive();
      final DistributedFileSystem dfs = cluster.getFileSystem();
      final String file = "/testScheduleSameBlock/file";
      {
        final FSDataOutputStream out = dfs.create(new Path(file));
        out.writeChars("testScheduleSameBlock");
        out.close();
      }
      final Mover mover = newMover(conf);
      mover.init();
      final Mover.Processor processor = mover.new Processor();
      final LocatedBlock lb = dfs.getClient().getLocatedBlocks(file, 0).get(0);
      final List<MLocation> locations = MLocation.toLocations(lb);
      final MLocation ml = locations.get(0);
      final DBlock db = mover.newDBlock(lb.getBlock().getLocalBlock(), locations);
      final List<StorageType> storageTypes = new ArrayList<StorageType>(
          Arrays.asList(StorageType.DEFAULT, StorageType.DEFAULT));
      // First scheduling succeeds; re-scheduling the identical move must fail.
      Assert.assertTrue(processor.scheduleMoveReplica(db, ml, storageTypes));
      Assert.assertFalse(processor.scheduleMoveReplica(db, ml, storageTypes));
    } finally {
      cluster.shutdown();
    }
  }
  // End-to-end: write a file on DISK (HOT policy), switch the directory to COLD,
  // run the Mover CLI, and verify all replicas land on ARCHIVE storage.
  @Test
  public void testScheduleBlockWithinSameNode() throws Exception {
    final Configuration conf = new HdfsConfiguration();
    final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
        .numDataNodes(3)
        .storageTypes(
            new StorageType[] { StorageType.DISK, StorageType.ARCHIVE })
        .build();
    try {
      cluster.waitActive();
      final DistributedFileSystem dfs = cluster.getFileSystem();
      final String file = "/testScheduleWithinSameNode/file";
      Path dir = new Path("/testScheduleWithinSameNode");
      dfs.mkdirs(dir);
      // write to DISK
      dfs.setStoragePolicy(dir, "HOT");
      {
        final FSDataOutputStream out = dfs.create(new Path(file));
        out.writeChars("testScheduleWithinSameNode");
        out.close();
      }
      //verify before movement
      LocatedBlock lb = dfs.getClient().getLocatedBlocks(file, 0).get(0);
      StorageType[] storageTypes = lb.getStorageTypes();
      for (StorageType storageType : storageTypes) {
        Assert.assertTrue(StorageType.DISK == storageType);
      }
      // move to ARCHIVE
      dfs.setStoragePolicy(dir, "COLD");
      int rc = ToolRunner.run(conf, new Mover.Cli(),
          new String[] { "-p", dir.toString() });
      Assert.assertEquals("Movement to ARCHIVE should be successfull", 0, rc);
      // Wait till namenode notified
      // NOTE(review): fixed sleep is potentially flaky; a poll/wait loop would be sturdier.
      Thread.sleep(3000);
      lb = dfs.getClient().getLocatedBlocks(file, 0).get(0);
      storageTypes = lb.getStorageTypes();
      for (StorageType storageType : storageTypes) {
        Assert.assertTrue(StorageType.ARCHIVE == storageType);
      }
    } finally {
      cluster.shutdown();
    }
  }
  // Asserts that `actual` contains exactly the expected paths (order-insensitive).
  private void checkMovePaths(List<Path> actual, Path... expected) {
    Assert.assertEquals(expected.length, actual.size());
    for (Path p : expected) {
      Assert.assertTrue(actual.contains(p));
    }
  }
  /**
   * Test Mover Cli by specifying a list of files/directories using option "-p".
   * There is only one namenode (and hence name service) specified in the conf.
   */
  @Test
  public void testMoverCli() throws Exception {
    final MiniDFSCluster cluster = new MiniDFSCluster
        .Builder(new HdfsConfiguration()).numDataNodes(0).build();
    try {
      final Configuration conf = cluster.getConfiguration(0);
      try {
        // Relative paths must be rejected.
        Mover.Cli.getNameNodePathsToMove(conf, "-p", "/foo", "bar");
        Assert.fail("Expected exception for illegal path bar");
      } catch (IllegalArgumentException e) {
        GenericTestUtils.assertExceptionContains("bar is not absolute", e);
      }
      // No "-p": the single namenode maps to a null path list (move everything).
      Map<URI, List<Path>> movePaths = Mover.Cli.getNameNodePathsToMove(conf);
      Collection<URI> namenodes = DFSUtil.getNsServiceRpcUris(conf);
      Assert.assertEquals(1, namenodes.size());
      Assert.assertEquals(1, movePaths.size());
      URI nn = namenodes.iterator().next();
      Assert.assertTrue(movePaths.containsKey(nn));
      Assert.assertNull(movePaths.get(nn));
      // With "-p": both absolute paths are attributed to the single namenode.
      movePaths = Mover.Cli.getNameNodePathsToMove(conf, "-p", "/foo", "/bar");
      namenodes = DFSUtil.getNsServiceRpcUris(conf);
      Assert.assertEquals(1, movePaths.size());
      nn = namenodes.iterator().next();
      Assert.assertTrue(movePaths.containsKey(nn));
      checkMovePaths(movePaths.get(nn), new Path("/foo"), new Path("/bar"));
    } finally {
      cluster.shutdown();
    }
  }
  // HA setup: paths should resolve to the logical nameservice URI (hdfs://MyCluster).
  @Test
  public void testMoverCliWithHAConf() throws Exception {
    final Configuration conf = new HdfsConfiguration();
    final MiniDFSCluster cluster = new MiniDFSCluster
        .Builder(new HdfsConfiguration())
        .nnTopology(MiniDFSNNTopology.simpleHATopology())
        .numDataNodes(0).build();
    HATestUtil.setFailoverConfigurations(cluster, conf, "MyCluster");
    try {
      Map<URI, List<Path>> movePaths = Mover.Cli.getNameNodePathsToMove(conf,
          "-p", "/foo", "/bar");
      Collection<URI> namenodes = DFSUtil.getNsServiceRpcUris(conf);
      Assert.assertEquals(1, namenodes.size());
      Assert.assertEquals(1, movePaths.size());
      URI nn = namenodes.iterator().next();
      Assert.assertEquals(new URI("hdfs://MyCluster"), nn);
      Assert.assertTrue(movePaths.containsKey(nn));
      checkMovePaths(movePaths.get(nn), new Path("/foo"), new Path("/bar"));
    } finally {
      cluster.shutdown();
    }
  }
  // Federation: scheme-less or wrong-scheme paths are rejected; fully-qualified
  // paths are grouped under their owning namenode.
  @Test
  public void testMoverCliWithFederation() throws Exception {
    final MiniDFSCluster cluster = new MiniDFSCluster
        .Builder(new HdfsConfiguration())
        .nnTopology(MiniDFSNNTopology.simpleFederatedTopology(3))
        .numDataNodes(0).build();
    final Configuration conf = new HdfsConfiguration();
    DFSTestUtil.setFederatedConfiguration(cluster, conf);
    try {
      Collection<URI> namenodes = DFSUtil.getNsServiceRpcUris(conf);
      Assert.assertEquals(3, namenodes.size());
      try {
        Mover.Cli.getNameNodePathsToMove(conf, "-p", "/foo");
        Assert.fail("Expect exception for missing authority information");
      } catch (IllegalArgumentException e) {
        GenericTestUtils.assertExceptionContains(
            "does not contain scheme and authority", e);
      }
      try {
        Mover.Cli.getNameNodePathsToMove(conf, "-p", "hdfs:///foo");
        Assert.fail("Expect exception for missing authority information");
      } catch (IllegalArgumentException e) {
        GenericTestUtils.assertExceptionContains(
            "does not contain scheme and authority", e);
      }
      try {
        Mover.Cli.getNameNodePathsToMove(conf, "-p", "wrong-hdfs://ns1/foo");
        Assert.fail("Expect exception for wrong scheme");
      } catch (IllegalArgumentException e) {
        GenericTestUtils.assertExceptionContains("Cannot resolve the path", e);
      }
      Iterator<URI> iter = namenodes.iterator();
      URI nn1 = iter.next();
      URI nn2 = iter.next();
      Map<URI, List<Path>> movePaths = Mover.Cli.getNameNodePathsToMove(conf,
          "-p", nn1 + "/foo", nn1 + "/bar", nn2 + "/foo/bar");
      Assert.assertEquals(2, movePaths.size());
      checkMovePaths(movePaths.get(nn1), new Path("/foo"), new Path("/bar"));
      checkMovePaths(movePaths.get(nn2), new Path("/foo/bar"));
    } finally {
      cluster.shutdown();
    }
  }
  // Federation + HA: fully-qualified paths group correctly across three nameservices.
  @Test
  public void testMoverCliWithFederationHA() throws Exception {
    final MiniDFSCluster cluster = new MiniDFSCluster
        .Builder(new HdfsConfiguration())
        .nnTopology(MiniDFSNNTopology.simpleHAFederatedTopology(3))
        .numDataNodes(0).build();
    final Configuration conf = new HdfsConfiguration();
    DFSTestUtil.setFederatedHAConfiguration(cluster, conf);
    try {
      Collection<URI> namenodes = DFSUtil.getNsServiceRpcUris(conf);
      Assert.assertEquals(3, namenodes.size());
      Iterator<URI> iter = namenodes.iterator();
      URI nn1 = iter.next();
      URI nn2 = iter.next();
      URI nn3 = iter.next();
      Map<URI, List<Path>> movePaths = Mover.Cli.getNameNodePathsToMove(conf,
          "-p", nn1 + "/foo", nn1 + "/bar", nn2 + "/foo/bar", nn3 + "/foobar");
      Assert.assertEquals(3, movePaths.size());
      checkMovePaths(movePaths.get(nn1), new Path("/foo"), new Path("/bar"));
      checkMovePaths(movePaths.get(nn2), new Path("/foo/bar"));
      checkMovePaths(movePaths.get(nn3), new Path("/foobar"));
    } finally {
      cluster.shutdown();
    }
  }
}
| apache-2.0 |
Seinlin/gerrit | gerrit-gpg/src/test/java/com/google/gerrit/gpg/GerritPublicKeyCheckerTest.java | 6214 | // Copyright (C) 2015 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.gpg;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.collect.ImmutableList;
import com.google.gerrit.gpg.testutil.TestKey;
import com.google.gerrit.lifecycle.LifecycleManager;
import com.google.gerrit.reviewdb.client.Account;
import com.google.gerrit.reviewdb.client.AccountExternalId;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.CurrentUser;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.account.AccountCache;
import com.google.gerrit.server.account.AccountManager;
import com.google.gerrit.server.account.AuthRequest;
import com.google.gerrit.server.schema.SchemaCreator;
import com.google.gerrit.server.util.RequestContext;
import com.google.gerrit.server.util.ThreadLocalRequestContext;
import com.google.gerrit.testutil.InMemoryDatabase;
import com.google.gerrit.testutil.InMemoryModule;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Provider;
import com.google.inject.util.Providers;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.util.Collections;
/** Unit tests for {@link GerritPublicKeyChecker}. */
public class GerritPublicKeyCheckerTest {
  @Inject
  private AccountCache accountCache;
  @Inject
  private AccountManager accountManager;
  @Inject
  private GerritPublicKeyChecker checker;
  @Inject
  private IdentifiedUser.GenericFactory userFactory;
  @Inject
  private InMemoryDatabase schemaFactory;
  @Inject
  private SchemaCreator schemaCreator;
  @Inject
  private ThreadLocalRequestContext requestContext;
  private LifecycleManager lifecycle;
  private ReviewDb db;
  private Account.Id userId;
  private IdentifiedUser user;
  // Boots an in-memory Gerrit: Guice injector, schema, one authenticated
  // "user" account, and a request context bound to that user.
  @Before
  public void setUpInjector() throws Exception {
    Injector injector = Guice.createInjector(new InMemoryModule());
    lifecycle = new LifecycleManager();
    lifecycle.add(injector);
    injector.injectMembers(this);
    lifecycle.start();
    db = schemaFactory.open();
    schemaCreator.create(db);
    userId =
        accountManager.authenticate(AuthRequest.forUser("user")).getAccountId();
    Account userAccount = db.accounts().get(userId);
    // Note: does not match any key in TestKey.
    userAccount.setPreferredEmail("user@example.com");
    db.accounts().update(ImmutableList.of(userAccount));
    user = reloadUser();
    requestContext.setContext(new RequestContext() {
      @Override
      public CurrentUser getCurrentUser() {
        return user;
      }
      @Override
      public Provider<ReviewDb> getReviewDbProvider() {
        return Providers.of(db);
      }
    });
  }
  // Evicts the cached account and rebuilds `user` so external-id changes are visible.
  private IdentifiedUser reloadUser() {
    accountCache.evict(userId);
    user = userFactory.create(Providers.of(db), userId);
    return user;
  }
  // Tears down lifecycle, DB connection, and the in-memory schema (null-safe
  // in case setUp failed part-way).
  @After
  public void tearDownInjector() {
    if (lifecycle != null) {
      lifecycle.stop();
    }
    if (db != null) {
      db.close();
    }
    InMemoryDatabase.drop(schemaFactory);
  }
  // A key certifying an email is accepted once an external id with that email exists.
  @Test
  public void defaultGpgCertificationMatchesEmail() throws Exception {
    TestKey key = TestKey.key5();
    assertProblems(
        TestKey.key5(),
        "Key must contain a valid certification for one of the following "
            + "identities:\n"
            + "  gerrit:user\n"
            + "  username:user");
    addExternalId("test", "test", "test5@example.com");
    assertNoProblems(key);
  }
  // An external id with a non-matching email does not satisfy the certification check.
  @Test
  public void defaultGpgCertificationDoesNotMatchEmail() throws Exception {
    addExternalId("test", "test", "nobody@example.com");
    assertProblems(
        TestKey.key5(),
        "Key must contain a valid certification for one of the following "
            + "identities:\n"
            + "  gerrit:user\n"
            + "  nobody@example.com\n"
            + "  test:test\n"
            + "  username:user");
  }
  // A key may also be certified against an arbitrary external id (scheme:id).
  @Test
  public void manualCertificationMatchesExternalId() throws Exception {
    addExternalId("foo", "myId", null);
    assertNoProblems(TestKey.key5());
  }
  @Test
  public void manualCertificationDoesNotExternalId() throws Exception {
    addExternalId("foo", "otherId", null);
    assertProblems(
        TestKey.key5(),
        "Key must contain a valid certification for one of the following "
            + "identities:\n"
            + "  foo:otherId\n"
            + "  gerrit:user\n"
            + "  username:user");
  }
  // With no external ids at all, the checker reports a distinct "no identities" problem.
  @Test
  public void noExternalIds() throws Exception {
    db.accountExternalIds().delete(
        db.accountExternalIds().byAccount(user.getAccountId()));
    reloadUser();
    assertProblems(
        TestKey.key5(),
        "No identities found for user; check"
            + " http://test/#/settings/web-identities");
  }
  // Asserts the checker reports no problems for the key.
  private void assertNoProblems(TestKey key) throws Exception {
    assertThat(checker.check(key.getPublicKey()).getProblems()).isEmpty();
  }
  // Asserts the checker reports exactly the given problems, in order.
  private void assertProblems(TestKey key, String... expected)
      throws Exception {
    checkArgument(expected.length > 0);
    assertThat(checker.check(key.getPublicKey()).getProblems())
        .containsExactly((Object[]) expected)
        .inOrder();
  }
  // Registers a new external id (optionally with an email) for the test user
  // and reloads the user so the checker sees it.
  private void addExternalId(String scheme, String id, String email)
      throws Exception {
    AccountExternalId extId = new AccountExternalId(user.getAccountId(),
        new AccountExternalId.Key(scheme, id));
    if (email != null) {
      extId.setEmailAddress(email);
    }
    db.accountExternalIds().insert(Collections.singleton(extId));
    reloadUser();
  }
}
| apache-2.0 |
qbert65536/squall | src/test/php/scratch/Post.java | 2042 | package scratch;
import java.util.ArrayList;
import java.util.List;
import org.json.JSONObject;
import java.sql.SQLException;
public class Post extends SquallBean {

    // Set to true by every setter; tracks whether this bean was modified
    // since it was loaded.
    boolean isDirty = false;

    /** Static factory mirroring the generated-code convention. */
    public static Post create() { return new Post(); }

    /** Inserts or updates this post via the DAO, returning its id. */
    public Long upsert() throws SQLException { return PostDAO.upsert(this); }

    /** Deletes this post by id via the DAO. */
    public Boolean remove() throws SQLException { return PostDAO.remove(this.id); }

    /** Finds posts matching the where clause, sorted, within a row range. */
    public static List<Post> find(String whereClause, String sortColumn, Integer lowerLimit, Integer upperLimit) throws SQLException { return PostDAO.find(whereClause, sortColumn, lowerLimit, upperLimit); }

    /** Finds posts matching the where clause, sorted by the given column. */
    public static List<Post> find(String whereClause, String sortColumn) throws SQLException { return PostDAO.find(whereClause, sortColumn); }

    /** Finds posts matching the where clause. */
    public static List<Post> find(String whereClause) throws SQLException { return PostDAO.find(whereClause); }

    /** Finds a single post matching the where clause. */
    public static Post findOne(String whereClause) throws SQLException { return PostDAO.findOne(whereClause); }

    // End freemarker template, now write java generated code ( legacy, move this to twirl #eventually ).

    @Override
    public String toString() {
        // NOTE(review): returns null when id is unset, which violates the usual
        // Object.toString() contract; kept as-is for behavioral compatibility
        // with the generated code this class came from.
        if (this.id == null) {
            return null;
        }
        return Long.toString(this.id);
    }

    /** Renders the post as "Post(description(...) content(...))". */
    public String toPrettyString() {
        StringBuilder sb = new StringBuilder("Post(");
        sb.append("description(").append(description).append(") ");
        sb.append("content(").append(content).append(") ");
        // Drop the trailing space before closing the outer parenthesis.
        sb.setLength(sb.length() - 1);
        sb.append(")");
        return sb.toString();
    }

    // writeClassMember last, no relation found
    protected String description;
    public String getDescription() { return description; }
    public Post setDescription(String x) { isDirty = true; description = x; return this; }

    // writeClassMember last, no relation found
    protected String content;
    public String getContent() { return content; }
    public Post setContent(String x) { isDirty = true; content = x; return this; }

    // writeOppositeClassMember
    // Specified as OneToMany() : Eager()
    protected User user;
    public User getUser() { return user; }
    public Post setUser(User x) { isDirty = true; user = x; return this; }
}
| apache-2.0 |
pax95/camel | dsl/camel-kamelet-main/src/test/java/org/apache/camel/main/KameletMainTest.java | 2589 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.main;
import org.apache.camel.StartupSummaryLevel;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
/**
 * Manual smoke tests for {@code KameletMain}. Disabled for CI because each
 * run downloads dependencies on demand and executes routes for several
 * seconds; run individually from the IDE when needed.
 */
@Disabled("Manual test")
public class KameletMainTest {

    /** Runs a YAML route for at most 5 seconds with dependency download enabled. */
    @Test
    public void testKameletMain() throws Exception {
        KameletMain main = new KameletMain();
        // Allow required artifacts to be downloaded on demand.
        main.setDownload(true);
        main.configure().withDurationMaxSeconds(5);
        main.configure().withRoutesIncludePattern("file:src/test/resources/my-route.yaml");
        main.run();
    }

    /**
     * Exercises hot reload of a plain YAML route: the route file is watched in
     * src/test/resources and the run stops after 10 exchanged messages.
     */
    @Test
    public void testReload() throws Exception {
        KameletMain main = new KameletMain();
        main.configure().setStartupSummaryLevel(StartupSummaryLevel.Verbose);
        main.setDownload(true);
        main.configure().withDurationMaxMessages(10);
        main.configure().withDurationMaxAction("stop");
        main.configure().withRoutesIncludePattern("file:src/test/resources/my-route.yaml");
        main.configure().withRoutesReloadEnabled(true);
        main.configure().withRoutesReloadDirectory("src/test/resources");
        main.configure().withRoutesReloadPattern("my-route.yaml");
        main.run();
    }

    /**
     * Same reload scenario as {@link #testReload()} but for a Camel K style
     * YAML file, with a 5 second shutdown timeout.
     */
    @Test
    public void testReloadCamelK() throws Exception {
        KameletMain main = new KameletMain();
        main.setDownload(true);
        main.configure().withShutdownTimeout(5);
        main.configure().withDurationMaxMessages(10);
        main.configure().withDurationMaxAction("stop");
        main.configure().withRoutesIncludePattern("file:src/test/resources/my-camel-k.yaml");
        main.configure().withRoutesReloadEnabled(true);
        main.configure().withRoutesReloadDirectory("src/test/resources");
        main.configure().withRoutesReloadPattern("my-camel-k.yaml");
        main.run();
    }
}
| apache-2.0 |
mswiderski/drools | drools-core/src/main/java/org/drools/base/evaluators/EvaluatorRegistry.java | 11151 | /*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.base.evaluators;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import org.drools.RuntimeDroolsException;
import org.drools.base.ValueType;
import org.drools.common.DroolsObjectInput;
import org.drools.spi.Evaluator;
/**
 * A registry for all available {@link EvaluatorDefinition}s.
 *
 * <p>Definitions are keyed by their evaluator ids (operator strings), so a
 * single definition instance may be registered under several ids. The default
 * constructor pre-registers the built-in (mostly temporal) evaluator
 * definitions.
 */
public class EvaluatorRegistry
    implements
    Externalizable {

    private static final long serialVersionUID = 510L; // was "510l"; uppercase L avoids confusion with digit 1

    // Maps evaluator id (operator string) -> the definition implementing it.
    private Map<String, EvaluatorDefinition> evaluators;
    // Classloader used to resolve evaluator definition classes by name.
    private ClassLoader classloader;

    /**
     * Default constructor. The registry will use the context classloader (if
     * available) to load the evaluator definition classes, or this class'
     * classloader if it is not available.
     */
    public EvaluatorRegistry() {
        this( null );
    }

    /**
     * Creates a new EvaluatorRegistry using the given classloader to load
     * the evaluator definition classes.
     *
     * @param classloader the classloader to use to load evaluator definition
     *                    classes. If it is null, try to obtain the context
     *                    classloader. If it is also null, uses the same
     *                    classloader that loaded this class.
     */
    public EvaluatorRegistry(ClassLoader classloader) {
        this.evaluators = new HashMap<String, EvaluatorDefinition>();
        if ( classloader != null ) {
            this.classloader = classloader;
        } else {
            this.classloader = getDefaultClassLoader();
        }
        // register the default built-in evaluator definitions
        this.addEvaluatorDefinition( new BeforeEvaluatorDefinition() );
        this.addEvaluatorDefinition( new AfterEvaluatorDefinition() );
        this.addEvaluatorDefinition( new MeetsEvaluatorDefinition() );
        this.addEvaluatorDefinition( new MetByEvaluatorDefinition() );
        this.addEvaluatorDefinition( new OverlapsEvaluatorDefinition() );
        this.addEvaluatorDefinition( new OverlappedByEvaluatorDefinition() );
        this.addEvaluatorDefinition( new IncludesEvaluatorDefinition() );
        this.addEvaluatorDefinition( new DuringEvaluatorDefinition() );
        this.addEvaluatorDefinition( new FinishesEvaluatorDefinition() );
        this.addEvaluatorDefinition( new FinishedByEvaluatorDefinition() );
        this.addEvaluatorDefinition( new StartsEvaluatorDefinition() );
        this.addEvaluatorDefinition( new StartedByEvaluatorDefinition() );
        this.addEvaluatorDefinition( new CoincidesEvaluatorDefinition() );
        this.addEvaluatorDefinition( new StrEvaluatorDefinition() );
        this.addEvaluatorDefinition( new IsAEvaluatorDefinition() );
    }

    /**
     * Returns the set of registered evaluator ids.
     *
     * <p>Note: this is a live view backed by the registry's internal map, as
     * returned by {@link Map#keySet()}.
     *
     * @return a Set of Strings
     */
    public Set<String> keySet() {
        return evaluators.keySet();
    }

    @SuppressWarnings("unchecked")
    public void readExternal( ObjectInput in ) throws IOException,
                                              ClassNotFoundException {
        evaluators = (Map<String, EvaluatorDefinition>) in.readObject();
        // DroolsObjectInput carries the correct classloader for the session;
        // fall back to the default resolution otherwise.
        if ( in instanceof DroolsObjectInput ) {
            classloader = ((DroolsObjectInput) in).getClassLoader();
        } else {
            classloader = getDefaultClassLoader();
        }
    }

    public void writeExternal( ObjectOutput out ) throws IOException {
        out.writeObject( evaluators );
    }

    /** Resolves the context classloader, falling back to this class' own loader. */
    private static ClassLoader getDefaultClassLoader() {
        if ( Thread.currentThread().getContextClassLoader() != null ) return Thread.currentThread().getContextClassLoader();
        return EvaluatorRegistry.class.getClassLoader();
    }

    /**
     * Adds an evaluator definition class to the registry using the
     * evaluator class name. The class will be loaded and the corresponding
     * evaluator IDs will be added to the registry. In case there already
     * exists an implementation for any of those IDs, the new implementation
     * replaces the previous one.
     *
     * @param className the name of the class for the implementation definition.
     *                  The class must implement the EvaluatorDefinition
     *                  interface and have a public no-arg constructor.
     *
     * @throws RuntimeDroolsException if the class cannot be found or
     *                                instantiated.
     */
    @SuppressWarnings("unchecked")
    public void addEvaluatorDefinition( String className ) {
        try {
            Class<EvaluatorDefinition> defClass = (Class<EvaluatorDefinition>) this.classloader.loadClass( className );
            EvaluatorDefinition def = defClass.newInstance();
            addEvaluatorDefinition( def );
        } catch ( ClassNotFoundException e ) {
            throw new RuntimeDroolsException( "Class not found for evaluator definition: " + className,
                                              e );
        } catch ( InstantiationException e ) {
            throw new RuntimeDroolsException( "Error instantiating class for evaluator definition: " + className,
                                              e );
        } catch ( IllegalAccessException e ) {
            throw new RuntimeDroolsException( "Illegal access instantiating class for evaluator definition: " + className,
                                              e );
        }
    }

    /**
     * Adds an evaluator definition to the registry, once per evaluator ID it
     * declares. In case there already exists an implementation for one of
     * those IDs, the new implementation replaces the previous one.
     *
     * @param def the evaluator definition to be added.
     */
    public void addEvaluatorDefinition( EvaluatorDefinition def ) {
        for ( String id : def.getEvaluatorIds() ) {
            this.evaluators.put( id,
                                 def );
        }
    }

    /**
     * Returns the evaluator definition for the given evaluator ID
     * or null if none was found.
     *
     * @param evaluatorId the id (operator string) to look up
     * @return the matching definition, or null
     */
    public EvaluatorDefinition getEvaluatorDefinition( String evaluatorId ) {
        return this.evaluators.get( evaluatorId );
    }

    /**
     * Returns the evaluator definition for the given operator
     * or null if none was found.
     *
     * @param operator the operator implemented by the evaluator definition
     * @return the matching definition, or null
     */
    public EvaluatorDefinition getEvaluatorDefinition( Operator operator ) {
        return this.evaluators.get( operator.getOperatorString() );
    }

    /**
     * Returns the evaluator instance for the given type and the
     * defined parameterText.
     *
     * <p>NOTE: throws NullPointerException if no definition is registered for
     * the given operator id.
     *
     * @param type          the type of the attributes this evaluator will
     *                      operate on. The evaluator may do optimizations and
     *                      type coercion based on it, or not support it at all.
     * @param operatorId    the string identifier of the evaluator
     * @param isNegated     true if the evaluator instance to be returned is
     *                      the negated version of the evaluator.
     * @param parameterText evaluator-specific parameters as a String parsed
     *                      by the evaluator itself.
     *
     * @return an Evaluator instance capable of evaluating expressions
     *         between values of the given type, or null in case the type
     *         is not supported.
     */
    public Evaluator getEvaluator( ValueType type,
                                   String operatorId,
                                   boolean isNegated,
                                   String parameterText ) {
        return this.getEvaluatorDefinition( operatorId ).getEvaluator( type,
                                                                       operatorId,
                                                                       isNegated,
                                                                       parameterText );
    }

    /**
     * Returns the evaluator instance for the given type, operator and the
     * defined parameterText.
     *
     * <p>NOTE: throws NullPointerException if no definition is registered for
     * the given operator.
     *
     * @param type          the type of the attributes this evaluator will
     *                      operate on.
     * @param operator      the operator that the evaluator implements
     * @param parameterText evaluator-specific parameters as a String parsed
     *                      by the evaluator itself.
     *
     * @return an Evaluator instance capable of evaluating expressions
     *         between values of the given type, or null in case the type
     *         is not supported.
     */
    public Evaluator getEvaluator( ValueType type,
                                   Operator operator,
                                   String parameterText ) {
        return this.getEvaluatorDefinition( operator ).getEvaluator( type,
                                                                     operator,
                                                                     parameterText );
    }

    /**
     * Returns the evaluator instance for the given type and operator.
     *
     * <p>NOTE: throws NullPointerException if no definition is registered for
     * the given operator.
     *
     * @param type     the type of the attributes this evaluator will operate on.
     * @param operator the operator that the evaluator implements
     *
     * @return an Evaluator instance capable of evaluating expressions
     *         between values of the given type, or null in case the type
     *         is not supported.
     */
    public Evaluator getEvaluator( ValueType type,
                                   Operator operator ) {
        return this.getEvaluatorDefinition( operator ).getEvaluator( type,
                                                                     operator );
    }
}
| apache-2.0 |
hongyuhong/flink | flink-libraries/flink-gelly-examples/src/main/java/org/apache/flink/graph/examples/data/TriangleCountData.java | 2701 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.graph.examples.data;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.graph.Edge;
import org.apache.flink.types.NullValue;
import java.util.ArrayList;
import java.util.List;
/**
 * Provides the default data sets used for the Triangle Count test program.
 * If no parameters are given to the program, the default data sets are used.
 *
 * <p>The graph contains 7 vertices, 9 edges and exactly 3 triangles:
 * (1,2,3), (2,3,6) and (3,4,5).
 */
public class TriangleCountData {

    /** Default edge list as whitespace-separated "source target" lines. */
    public static final String EDGES = "1 2\n" +
            "1 3\n" +
            "2 3\n" +
            "2 6\n" +
            "3 4\n" +
            "3 5\n" +
            "3 6\n" +
            "4 5\n" +
            "6 7\n";

    /**
     * Builds the default edge DataSet (the same graph as {@link #EDGES}).
     *
     * @param env the execution environment in which the data set is created
     * @return a DataSet of the nine default edges carrying NullValue edge values
     */
    public static DataSet<Edge<Long, NullValue>> getDefaultEdgeDataSet(ExecutionEnvironment env) {
        // Diamond operator used for consistency with getListOfTriangles() below.
        List<Edge<Long, NullValue>> edges = new ArrayList<>();
        edges.add(new Edge<>(1L, 2L, NullValue.getInstance()));
        edges.add(new Edge<>(1L, 3L, NullValue.getInstance()));
        edges.add(new Edge<>(2L, 3L, NullValue.getInstance()));
        edges.add(new Edge<>(2L, 6L, NullValue.getInstance()));
        edges.add(new Edge<>(3L, 4L, NullValue.getInstance()));
        edges.add(new Edge<>(3L, 5L, NullValue.getInstance()));
        edges.add(new Edge<>(3L, 6L, NullValue.getInstance()));
        edges.add(new Edge<>(4L, 5L, NullValue.getInstance()));
        edges.add(new Edge<>(6L, 7L, NullValue.getInstance()));
        return env.fromCollection(edges);
    }

    /** Expected triangle count for the default graph, as produced by the program. */
    public static final String RESULTED_NUMBER_OF_TRIANGLES = "3";

    /**
     * Returns the three triangles contained in the default graph, each as a
     * triple of vertex ids.
     */
    public static List<Tuple3<Long, Long, Long>> getListOfTriangles() {
        ArrayList<Tuple3<Long, Long, Long>> ret = new ArrayList<>(3);
        ret.add(new Tuple3<>(1L, 2L, 3L));
        ret.add(new Tuple3<>(2L, 3L, 6L));
        ret.add(new Tuple3<>(4L, 3L, 5L));
        return ret;
    }

    /** Utility class; not meant to be instantiated. */
    private TriangleCountData () {}
}
| apache-2.0 |
juwi/hbase | hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStripeStoreEngine.java | 5529 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
import org.apache.hadoop.hbase.regionserver.compactions.NoLimitCompactionThroughputController;
import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy;
import org.apache.hadoop.hbase.regionserver.compactions.StripeCompactor;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionThroughputController;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category({RegionServerTests.class, SmallTests.class})
public class TestStripeStoreEngine {

  /**
   * Verifies that the engine class named by STORE_ENGINE_CLASS_KEY is the one
   * actually instantiated, and that it wires up a stripe compaction policy.
   */
  @Test
  public void testCreateBasedOnConfig() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    conf.set(StoreEngine.STORE_ENGINE_CLASS_KEY, TestStoreEngine.class.getName());
    StripeStoreEngine se = createEngine(conf);
    assertTrue(se.getCompactionPolicy() instanceof StripeCompactionPolicy);
  }

  /** Test subclass that allows injecting a mock compactor into the engine. */
  public static class TestStoreEngine extends StripeStoreEngine {
    public void setCompactorOverride(StripeCompactor compactorOverride) {
      this.compactor = compactorOverride;
    }
  }

  /**
   * Verifies that forceSelect() replaces a previously selected file set, and
   * that compact() forwards the (possibly overridden) request to the stripe
   * compactor with the configured target stripe count and open boundaries.
   */
  @Test
  public void testCompactionContextForceSelect() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    int targetCount = 2;
    conf.setInt(StripeStoreConfig.INITIAL_STRIPE_COUNT_KEY, targetCount);
    conf.setInt(StripeStoreConfig.MIN_FILES_L0_KEY, 2);
    conf.set(StoreEngine.STORE_ENGINE_CLASS_KEY, TestStoreEngine.class.getName());
    TestStoreEngine se = createEngine(conf);
    StripeCompactor mockCompactor = mock(StripeCompactor.class);
    se.setCompactorOverride(mockCompactor);
    // Any compact() call on the mock returns an empty result-file list.
    when(
      mockCompactor.compact(any(CompactionRequest.class), anyInt(), anyLong(), any(byte[].class),
        any(byte[].class), any(byte[].class), any(byte[].class),
        any(CompactionThroughputController.class))).thenReturn(new ArrayList<Path>());
    // Produce 3 L0 files.
    StoreFile sf = createFile();
    ArrayList<StoreFile> compactUs = al(sf, createFile(), createFile());
    se.getStoreFileManager().loadFiles(compactUs);
    // Create a compaction that would want to split the stripe.
    CompactionContext compaction = se.createCompaction();
    compaction.select(al(), false, false, false);
    assertEquals(3, compaction.getRequest().getFiles().size());
    // Override the file list. Granted, overriding this compaction in this manner will
    // break things in real world, but we only want to verify the override.
    compactUs.remove(sf);
    CompactionRequest req = new CompactionRequest(compactUs);
    compaction.forceSelect(req);
    assertEquals(2, compaction.getRequest().getFiles().size());
    assertFalse(compaction.getRequest().getFiles().contains(sf));
    // Make sure the correct method it called on compactor.
    compaction.compact(NoLimitCompactionThroughputController.INSTANCE);
    verify(mockCompactor, times(1)).compact(compaction.getRequest(), targetCount, 0L,
      StripeStoreFileManager.OPEN_KEY, StripeStoreFileManager.OPEN_KEY, null, null,
      NoLimitCompactionThroughputController.INSTANCE);
  }

  /** Creates a mock store file with an invalid stripe key and a usable reader/path. */
  private static StoreFile createFile() throws Exception {
    StoreFile sf = mock(StoreFile.class);
    when(sf.getMetadataValue(any(byte[].class)))
      .thenReturn(StripeStoreFileManager.INVALID_KEY);
    when(sf.getReader()).thenReturn(mock(StoreFile.Reader.class));
    when(sf.getPath()).thenReturn(new Path("moo"));
    return sf;
  }

  /** Builds a store engine from the given configuration against mock collaborators. */
  private static TestStoreEngine createEngine(Configuration conf) throws Exception {
    Store store = mock(Store.class);
    CellComparator kvComparator = mock(CellComparator.class);
    return (TestStoreEngine)StoreEngine.create(store, conf, kvComparator);
  }

  /** Shorthand for a mutable ArrayList of store files. */
  private static ArrayList<StoreFile> al(StoreFile... sfs) {
    return new ArrayList<StoreFile>(Arrays.asList(sfs));
  }
}
| apache-2.0 |
reynoldsm88/droolsjbpm-integration | kie-server-parent/kie-server-services/kie-server-services-common/src/main/java/org/kie/server/services/impl/KieServerEventSupport.java | 4262 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.server.services.impl;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.ServiceLoader;
import org.kie.server.services.api.KieContainerInstance;
import org.kie.server.services.api.KieServer;
import org.kie.server.services.api.KieServerEventListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Discovers {@link KieServerEventListener} implementations on the classpath
 * via {@link ServiceLoader} and broadcasts KIE server / container lifecycle
 * events to all of them.
 *
 * <p>Listeners are collected once, at construction time, and notified in
 * discovery order. Replaces the original
 * {@code if (iter.hasNext()) do {...} while (iter.hasNext())} idiom with a
 * plain for-each loop (identical behavior: zero listeners means no calls).
 */
public class KieServerEventSupport {

    private static final Logger logger = LoggerFactory.getLogger(KieServerEventSupport.class);

    private static final ServiceLoader<KieServerEventListener> eventListenersLoader = ServiceLoader.load(KieServerEventListener.class);

    // Listeners found on the classpath when this instance was constructed.
    private final List<KieServerEventListener> eventListeners = new ArrayList<>();

    public KieServerEventSupport() {
        eventListenersLoader.forEach(
            listener -> {
                eventListeners.add(listener);
                logger.debug("Found kie server event listener {}", listener);
            }
        );
    }

    public void fireBeforeServerStarted(KieServer kieServer) {
        for (KieServerEventListener listener : eventListeners) {
            listener.beforeServerStarted(kieServer);
        }
    }

    public void fireAfterServerStarted(KieServer kieServer) {
        for (KieServerEventListener listener : eventListeners) {
            listener.afterServerStarted(kieServer);
        }
    }

    public void fireBeforeServerStopped(KieServer kieServer) {
        for (KieServerEventListener listener : eventListeners) {
            listener.beforeServerStopped(kieServer);
        }
    }

    public void fireAfterServerStopped(KieServer kieServer) {
        for (KieServerEventListener listener : eventListeners) {
            listener.afterServerStopped(kieServer);
        }
    }

    public void fireBeforeContainerStarted(KieServer kieServer, KieContainerInstance containerInstance) {
        for (KieServerEventListener listener : eventListeners) {
            listener.beforeContainerStarted(kieServer, containerInstance);
        }
    }

    public void fireAfterContainerStarted(KieServer kieServer, KieContainerInstance containerInstance) {
        for (KieServerEventListener listener : eventListeners) {
            listener.afterContainerStarted(kieServer, containerInstance);
        }
    }

    public void fireBeforeContainerStopped(KieServer kieServer, KieContainerInstance containerInstance) {
        for (KieServerEventListener listener : eventListeners) {
            listener.beforeContainerStopped(kieServer, containerInstance);
        }
    }

    public void fireAfterContainerStopped(KieServer kieServer, KieContainerInstance containerInstance) {
        for (KieServerEventListener listener : eventListeners) {
            listener.afterContainerStopped(kieServer, containerInstance);
        }
    }
}
| apache-2.0 |
dasomel/egovframework | common-component/v3.7.3/src/main/java/egovframework/com/cop/bbs/web/EgovBBSUseInfoManageController.java | 8751 | package egovframework.com.cop.bbs.web;
import java.util.Map;
import egovframework.com.cmm.EgovComponentChecker;
import egovframework.com.cmm.LoginVO;
import egovframework.com.cmm.annotation.IncludedInfo;
import egovframework.com.cmm.util.EgovUserDetailsHelper;
import egovframework.com.cop.bbs.service.BoardUseInf;
import egovframework.com.cop.bbs.service.BoardUseInfVO;
import egovframework.com.cop.bbs.service.EgovBBSUseInfoManageService;
import egovframework.rte.fdl.property.EgovPropertyService;
import egovframework.rte.ptl.mvc.tags.ui.pagination.PaginationInfo;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.support.SessionStatus;
import org.springmodules.validation.commons.DefaultBeanValidator;
/**
 * Controller class for managing bulletin board (BBS) usage information.
 *
 * @author Lee Sam-seop, Common Service Development Team
 * @since 2009.06.01
 * @version 1.0
 * @see
 *
 * <pre>
 * << Modification History >>
 *
 *   Date        Author          Description
 *   ----------  --------------  ---------------------------
 *   2009.04.02  Lee Sam-seop    Initial creation
 *   2011.07.21  Ahn Min-jung    Community-related methods split out (-> EgovCmyUserInfController)
 *   2011.08.26  Jung Jin-o      IncludedInfo annotation added
 *   2011.09.15  Seo Jun-sik     Added checks for whether the community/club components are in use
 * </pre>
 */
@Controller
public class EgovBBSUseInfoManageController {

    @Resource(name = "EgovBBSUseInfoManageService")
    private EgovBBSUseInfoManageService bbsUseService;

    @Resource(name = "propertiesService")
    protected EgovPropertyService propertyService;

    @Autowired
    private DefaultBeanValidator beanValidator;

    //protected Logger log = Logger.getLogger(this.getClass());

    /**
     * Deletes board usage information.
     *
     * @param bdUseVO  search condition value object
     * @param bdUseInf the usage-information record to delete
     * @param status   session status
     * @param model    Spring MVC model
     * @return forward to the usage-information list view
     * @throws Exception on service failure
     */
    @RequestMapping("/cop/com/deleteBBSUseInf.do")
    public String deleteBBSUseInf(@ModelAttribute("searchVO") BoardUseInfVO bdUseVO, @ModelAttribute("bdUseInf") BoardUseInf bdUseInf,
            SessionStatus status, ModelMap model) throws Exception {
        //LoginVO user = (LoginVO)EgovUserDetailsHelper.getAuthenticatedUser();
        Boolean isAuthenticated = EgovUserDetailsHelper.isAuthenticated();
        // Only authenticated users may delete usage information.
        if (isAuthenticated) {
            bbsUseService.deleteBBSUseInf(bdUseInf);
        }
        return "forward:/cop/com/selectBBSUseInfs.do";
    }

    /**
     * Moves to the registration page for creating board usage information.
     *
     * @param bdUseVO search condition value object
     * @param model   Spring MVC model
     * @return the usage-information registration view
     * @throws Exception on component-check failure
     */
    @RequestMapping("/cop/com/addBBSUseInf.do")
    public String addBBSUseInf(@ModelAttribute("searchVO") BoardUseInfVO bdUseVO, ModelMap model) throws Exception {
        // 2011.09.15: expose community/club options only when those components are deployed.
        if(EgovComponentChecker.hasComponent("EgovCommunityManageService")){
            model.addAttribute("useCommunity", "true");
        }
        if(EgovComponentChecker.hasComponent("EgovClubManageService")){
            model.addAttribute("useClub", "true");
        }
        return "egovframework/com/cop/com/EgovBoardUseInfRegist";
    }

    /**
     * Registers board usage information.
     *
     * @param bdUseVO       search condition value object
     * @param boardUseInf   the usage-information record to insert
     * @param bindingResult validation result for boardUseInf
     * @param commandMap    raw request parameters (carries "param_trgetType")
     * @param status        session status
     * @param model         Spring MVC model
     * @return forward to the usage-information list view, or back to the
     *         registration view on validation errors
     * @throws Exception on service failure
     */
    @RequestMapping("/cop/com/insertBBSUseInf.do")
    public String insertBBSUseInf(@ModelAttribute("searchVO") BoardUseInfVO bdUseVO, @ModelAttribute("boardUseInf") BoardUseInf boardUseInf,
            BindingResult bindingResult, @RequestParam Map<String, Object> commandMap, SessionStatus status, ModelMap model) throws Exception {
        LoginVO user = (LoginVO)EgovUserDetailsHelper.getAuthenticatedUser();
        Boolean isAuthenticated = EgovUserDetailsHelper.isAuthenticated();
        beanValidator.validate(boardUseInf, bindingResult);
        if (bindingResult.hasErrors()) {
            return "egovframework/com/cop/com/EgovBoardUseInfRegist";
        }
        String trgetType = (String)commandMap.get("param_trgetType");
        String registSeCode = "";
        // Registration type codes: CMMNTY -> REGC06 / CLUB -> REGC05 / SYSTEM -> REGC01
        if ("CMMNTY".equals(trgetType)) {
            registSeCode = "REGC06";
        } else if ("CLUB".equals(trgetType)) {
            registSeCode = "REGC05";
        } else {
            registSeCode = "REGC01";
        }
        boardUseInf.setUseAt("Y");
        boardUseInf.setFrstRegisterId(user.getUniqId());
        boardUseInf.setRegistSeCode(registSeCode);
        if (isAuthenticated) {
            bbsUseService.insertBBSUseInf(boardUseInf);
        }
        return "forward:/cop/com/selectBBSUseInfs.do";
    }

    /**
     * Retrieves the paginated list of board usage information.
     *
     * @param bdUseVO search condition value object (pagination fields filled in here)
     * @param model   Spring MVC model
     * @return the usage-information list view
     * @throws Exception on service failure
     */
    @IncludedInfo(name="게시판사용정보", order = 190 ,gid = 40)
    @RequestMapping("/cop/com/selectBBSUseInfs.do")
    public String selectBBSUseInfs(@ModelAttribute("searchVO") BoardUseInfVO bdUseVO, ModelMap model) throws Exception {
        bdUseVO.setPageUnit(propertyService.getInt("pageUnit"));
        bdUseVO.setPageSize(propertyService.getInt("pageSize"));
        PaginationInfo paginationInfo = new PaginationInfo();
        paginationInfo.setCurrentPageNo(bdUseVO.getPageIndex());
        paginationInfo.setRecordCountPerPage(bdUseVO.getPageUnit());
        paginationInfo.setPageSize(bdUseVO.getPageSize());
        bdUseVO.setFirstIndex(paginationInfo.getFirstRecordIndex());
        bdUseVO.setLastIndex(paginationInfo.getLastRecordIndex());
        bdUseVO.setRecordCountPerPage(paginationInfo.getRecordCountPerPage());
        Map<String, Object> map = bbsUseService.selectBBSUseInfs(bdUseVO);
        int totCnt = Integer.parseInt((String)map.get("resultCnt"));
        paginationInfo.setTotalRecordCount(totCnt);
        model.addAttribute("resultList", map.get("resultList"));
        model.addAttribute("resultCnt", map.get("resultCnt"));
        model.addAttribute("paginationInfo", paginationInfo);
        // 2011.09.15: expose community/club options only when those components are deployed.
        if(EgovComponentChecker.hasComponent("EgovCommunityManageService")){
            model.addAttribute("useCommunity", "true");
        }
        if(EgovComponentChecker.hasComponent("EgovClubManageService")){
            model.addAttribute("useClub", "true");
        }
        return "egovframework/com/cop/com/EgovBoardUseInfList";
    }

    /**
     * Updates board usage information.
     *
     * @param bdUseVO     search condition value object
     * @param boardUseInf the usage-information record to update
     * @param status      session status
     * @param model       Spring MVC model
     * @return forward to the usage-information list view
     * @throws Exception on service failure
     */
    @RequestMapping("/cop/com/updateBBSUseInf.do")
    public String updateBBSUseInf(@ModelAttribute("searchVO") BoardUseInfVO bdUseVO, @ModelAttribute("boardUseInf") BoardUseInf boardUseInf,
            SessionStatus status, ModelMap model) throws Exception {
        //LoginVO user = (LoginVO)EgovUserDetailsHelper.getAuthenticatedUser();
        Boolean isAuthenticated = EgovUserDetailsHelper.isAuthenticated();
        // Only authenticated users may update usage information.
        if (isAuthenticated) {
            bbsUseService.updateBBSUseInf(boardUseInf);
        }
        return "forward:/cop/com/selectBBSUseInfs.do";
    }

    /**
     * Retrieves the detail of one board usage-information record.
     *
     * @param bdUseVO search condition value object identifying the record
     * @param model   Spring MVC model
     * @param request current HTTP request (used to build the board URL)
     * @return the usage-information detail view
     * @throws Exception on service failure
     */
    @RequestMapping("/cop/com/selectBBSUseInf.do")
    public String selectBBSUseInf(@ModelAttribute("searchVO") BoardUseInfVO bdUseVO, ModelMap model, HttpServletRequest request) throws Exception {
        BoardUseInfVO vo = bbsUseService.selectBBSUseInf(bdUseVO);
        // For boards used by the system itself, expose the board's access URL.
        if ("SYSTEM_DEFAULT_BOARD".equals(vo.getTrgetId())) {
            if (vo.getBbsTyCode().equals("BBST02")) { // anonymous board
                vo.setProvdUrl(request.getContextPath()+ "/cop/bbs/anonymous/selectBoardList.do?bbsId=" + vo.getBbsId());
            } else {
                vo.setProvdUrl(request.getContextPath()+ "/cop/bbs/selectBoardList.do?bbsId=" + vo.getBbsId());
            }
        }
        model.addAttribute("bdUseVO", vo);
        return "egovframework/com/cop/com/EgovBoardUseInfInqire";
    }
}
| apache-2.0 |
nmcl/scratch | graalvm/transactions/fork/narayana/blacktie/jatmibroker-xatmi/src/main/java/org/jboss/narayana/blacktie/jatmibroker/core/conf/Machine.java | 2540 | /*
* JBoss, Home of Professional Open Source
* Copyright 2008, Red Hat, Inc., and others contributors as indicated
* by the @authors tag. All rights reserved.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package org.jboss.narayana.blacktie.jatmibroker.core.conf;
/**
 * Configuration entry describing one machine that hosts a BlackTie server
 * instance: its identity, network address, executable location and launch
 * arguments. Plain mutable holder populated by the configuration parser.
 */
public class Machine {

    private String id;
    private String hostname;
    private String ipAddress;
    private String pathToExecutable;
    private String workingDirectory;
    private int serverId;
    private String argLine;
    private Server server;

    /** @return the unique identifier of this machine entry */
    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    /** @return the host name of the machine */
    public String getHostname() {
        return hostname;
    }

    public void setHostname(String hostname) {
        this.hostname = hostname;
    }

    /** @return the IP address of the machine */
    public String getIpAddress() {
        return ipAddress;
    }

    public void setIpAddress(String ipAddress) {
        this.ipAddress = ipAddress;
    }

    /** @return the filesystem path of the server executable */
    public String getPathToExecutable() {
        return pathToExecutable;
    }

    public void setPathToExecutable(String pathToExecutable) {
        this.pathToExecutable = pathToExecutable;
    }

    /** @return the working directory the server is launched in */
    public String getWorkingDirectory() {
        return workingDirectory;
    }

    public void setWorkingDirectory(String workingDirectory) {
        this.workingDirectory = workingDirectory;
    }

    /** @return the numeric id of the server instance on this machine */
    public int getServerId() {
        return serverId;
    }

    public void setServerId(int serverId) {
        this.serverId = serverId;
    }

    /** @return extra command line arguments passed at launch */
    public String getArgLine() {
        return argLine;
    }

    public void setArgLine(String argLine) {
        this.argLine = argLine;
    }

    /** @return the owning server definition */
    public Server getServer() {
        return server;
    }

    public void setServer(Server server) {
        this.server = server;
    }
}
| apache-2.0 |
shs96c/buck | src/com/facebook/buck/android/GetStringsFilesStep.java | 2737 | /*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.android;
import com.facebook.buck.io.file.MorePaths;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.step.ExecutionContext;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.StepExecutionResult;
import com.facebook.buck.step.StepExecutionResults;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
import java.nio.file.Path;
import java.util.function.Predicate;
import java.util.regex.Pattern;
/**
 * Collects the paths of all {@code strings.xml} files found under a set of
 * {@code res} directories.
 *
 * <p>The resulting order is consistent with the order of the input resource
 * directories.
 */
public class GetStringsFilesStep implements Step {
  @VisibleForTesting
  static final Pattern STRINGS_FILE_PATH =
      Pattern.compile("(\\b|.*/)res/values(-.+)*/strings.xml", Pattern.CASE_INSENSITIVE);

  private final ProjectFilesystem filesystem;
  private final ImmutableList<Path> resDirs;
  private final ImmutableList.Builder<Path> stringFilesBuilder;

  /** @param resDirs list of {@code res} directories to find strings.xml files from */
  GetStringsFilesStep(
      ProjectFilesystem filesystem,
      ImmutableList<Path> resDirs,
      ImmutableList.Builder<Path> stringFilesBuilder) {
    this.filesystem = filesystem;
    this.resDirs = resDirs;
    this.stringFilesBuilder = stringFilesBuilder;
  }

  @Override
  public StepExecutionResult execute(ExecutionContext context)
      throws IOException, InterruptedException {
    // Walk each res directory in order so the output preserves input ordering.
    Predicate<Path> isStringsFile = GetStringsFilesStep::isStringsFilePath;
    for (Path resDir : resDirs) {
      stringFilesBuilder.addAll(filesystem.getFilesUnderPath(resDir, isStringsFile));
    }
    return StepExecutionResults.SUCCESS;
  }

  /**
   * Returns true if the project-relative path looks like
   * {@code .../res/values[-qualifier]/strings.xml} (case-insensitive).
   */
  private static boolean isStringsFilePath(Path pathRelativeToProjectRoot) {
    String filePath = MorePaths.pathWithUnixSeparators(pathRelativeToProjectRoot);
    return STRINGS_FILE_PATH.matcher(filePath).matches();
  }

  @Override
  public String getShortName() {
    return "get_strings_files ";
  }

  @Override
  public String getDescription(ExecutionContext context) {
    return getShortName();
  }
}
| apache-2.0 |
googleapis/google-api-java-client-services | clients/google-api-services-videointelligence/v1/1.31.0/com/google/api/services/videointelligence/v1/model/GoogleCloudVideointelligenceV1p3beta1LabelFrame.java | 3202 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.videointelligence.v1.model;
/**
* Video frame level annotation results for label detection.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Cloud Video Intelligence API. For a detailed
* explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class GoogleCloudVideointelligenceV1p3beta1LabelFrame extends com.google.api.client.json.GenericJson {
  // NOTE(review): generated model class (see file header) — do not hand-edit fields;
  // regenerate from the API discovery document instead.
  /**
   * Confidence that the label is accurate. Range: [0, 1].
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Float confidence;
  /**
   * Time-offset, relative to the beginning of the video, corresponding to the video frame for this
   * location.
   * The value may be {@code null}.
   */
  // NOTE(review): modelled as a String, presumably a Duration-encoded value — confirm against the API spec.
  @com.google.api.client.util.Key
  private String timeOffset;
  /**
   * Confidence that the label is accurate. Range: [0, 1].
   * @return value or {@code null} for none
   */
  public java.lang.Float getConfidence() {
    return confidence;
  }
  /**
   * Confidence that the label is accurate. Range: [0, 1].
   * @param confidence confidence or {@code null} for none
   */
  public GoogleCloudVideointelligenceV1p3beta1LabelFrame setConfidence(java.lang.Float confidence) {
    this.confidence = confidence;
    return this;
  }
  /**
   * Time-offset, relative to the beginning of the video, corresponding to the video frame for this
   * location.
   * @return value or {@code null} for none
   */
  public String getTimeOffset() {
    return timeOffset;
  }
  /**
   * Time-offset, relative to the beginning of the video, corresponding to the video frame for this
   * location.
   * @param timeOffset timeOffset or {@code null} for none
   */
  public GoogleCloudVideointelligenceV1p3beta1LabelFrame setTimeOffset(String timeOffset) {
    this.timeOffset = timeOffset;
    return this;
  }
  @Override
  public GoogleCloudVideointelligenceV1p3beta1LabelFrame set(String fieldName, Object value) {
    return (GoogleCloudVideointelligenceV1p3beta1LabelFrame) super.set(fieldName, value);
  }
  @Override
  public GoogleCloudVideointelligenceV1p3beta1LabelFrame clone() {
    return (GoogleCloudVideointelligenceV1p3beta1LabelFrame) super.clone();
  }
}
| apache-2.0 |
treasure-data/presto | presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/test/RuleAssert.java | 11954 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.sql.planner.iterative.rule.test;
import com.google.common.collect.ImmutableSet;
import io.prestosql.Session;
import io.prestosql.cost.CachingCostProvider;
import io.prestosql.cost.CachingStatsProvider;
import io.prestosql.cost.CostCalculator;
import io.prestosql.cost.CostProvider;
import io.prestosql.cost.PlanNodeStatsEstimate;
import io.prestosql.cost.StatsAndCosts;
import io.prestosql.cost.StatsCalculator;
import io.prestosql.cost.StatsProvider;
import io.prestosql.execution.warnings.WarningCollector;
import io.prestosql.matching.Capture;
import io.prestosql.matching.Match;
import io.prestosql.matching.Pattern;
import io.prestosql.metadata.Metadata;
import io.prestosql.security.AccessControl;
import io.prestosql.sql.planner.Plan;
import io.prestosql.sql.planner.PlanNodeIdAllocator;
import io.prestosql.sql.planner.SymbolAllocator;
import io.prestosql.sql.planner.TypeProvider;
import io.prestosql.sql.planner.assertions.PlanMatchPattern;
import io.prestosql.sql.planner.iterative.Lookup;
import io.prestosql.sql.planner.iterative.Memo;
import io.prestosql.sql.planner.iterative.Rule;
import io.prestosql.sql.planner.plan.PlanNode;
import io.prestosql.sql.planner.plan.PlanNodeId;
import io.prestosql.transaction.TransactionManager;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.Stream;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.MoreCollectors.toOptional;
import static io.prestosql.matching.Capture.newCapture;
import static io.prestosql.sql.planner.assertions.PlanAssert.assertPlan;
import static io.prestosql.sql.planner.planprinter.PlanPrinter.textLogicalPlan;
import static io.prestosql.transaction.TransactionBuilder.transaction;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static org.testng.Assert.fail;
/**
 * Fluent assertion helper for exercising a single iterative optimizer {@link Rule}
 * against a hand-built plan, and for verifying whether (and how) the rule fires.
 * Typical usage: {@code ruleAssert.on(planBuilder -> ...).matches(pattern)} or
 * {@code .doesNotFire()}.
 */
public class RuleAssert
{
    private final Metadata metadata;
    // Wraps the real stats calculator so individual tests can override per-node stats.
    private TestingStatsCalculator statsCalculator;
    private final CostCalculator costCalculator;
    private Session session;
    private final Rule<?> rule;
    private final PlanNodeIdAllocator idAllocator = new PlanNodeIdAllocator();
    private TypeProvider types;
    private PlanNode plan;
    private final TransactionManager transactionManager;
    private final AccessControl accessControl;

    public RuleAssert(Metadata metadata, StatsCalculator statsCalculator, CostCalculator costCalculator, Session session, Rule<?> rule, TransactionManager transactionManager, AccessControl accessControl)
    {
        this.metadata = metadata;
        this.statsCalculator = new TestingStatsCalculator(statsCalculator);
        this.costCalculator = costCalculator;
        this.session = session;
        this.rule = rule;
        this.transactionManager = transactionManager;
        this.accessControl = accessControl;
    }

    /** Convenience for {@link #withSession} with one system property overridden. */
    public RuleAssert setSystemProperty(String key, String value)
    {
        return withSession(Session.builder(session)
                .setSystemProperty(key, value)
                .build());
    }

    public RuleAssert withSession(Session session)
    {
        this.session = session;
        return this;
    }

    /** Overrides the stats estimate reported for the plan node with the given id. */
    public RuleAssert overrideStats(String nodeId, PlanNodeStatsEstimate nodeStats)
    {
        statsCalculator.setNodeStats(new PlanNodeId(nodeId), nodeStats);
        return this;
    }

    /**
     * Builds the plan under test. May only be called once; the builder's symbol
     * types are captured for later stats/cost computation.
     */
    public RuleAssert on(Function<PlanBuilder, PlanNode> planProvider)
    {
        checkArgument(plan == null, "plan has already been set");
        PlanBuilder builder = new PlanBuilder(idAllocator, metadata);
        plan = planProvider.apply(builder);
        types = builder.getTypes();
        return this;
    }

    /** Asserts that the rule does not produce a result for the configured plan. */
    public void doesNotFire()
    {
        RuleApplication ruleApplication = applyRule();
        if (ruleApplication.wasRuleApplied()) {
            fail(format(
                    "Expected %s to not fire for:\n%s",
                    rule.getClass().getName(),
                    inTransaction(session -> textLogicalPlan(plan, ruleApplication.types, metadata, StatsAndCosts.empty(), session, 2, false))));
        }
    }

    /**
     * Asserts that the rule fires, returns a new plan instance with an equivalent
     * output schema, and that the transformed plan matches the given pattern.
     */
    public void matches(PlanMatchPattern pattern)
    {
        RuleApplication ruleApplication = applyRule();
        TypeProvider types = ruleApplication.types;
        if (!ruleApplication.wasRuleApplied()) {
            fail(format(
                    "%s did not fire for:\n%s",
                    rule.getClass().getName(),
                    formatPlan(plan, types)));
        }
        PlanNode actual = ruleApplication.getTransformedPlan();
        if (actual == plan) { // plans are not comparable, so we can only ensure they are not the same instance
            fail(format(
                    "%s: rule fired but return the original plan:\n%s",
                    rule.getClass().getName(),
                    formatPlan(plan, types)));
        }
        // Output symbols must stay equivalent (as a set) for callers of the rewritten node.
        if (!ImmutableSet.copyOf(plan.getOutputSymbols()).equals(ImmutableSet.copyOf(actual.getOutputSymbols()))) {
            fail(format(
                    "%s: output schema of transformed and original plans are not equivalent\n" +
                            "\texpected: %s\n" +
                            "\tactual: %s",
                    rule.getClass().getName(),
                    plan.getOutputSymbols(),
                    actual.getOutputSymbols()));
        }
        inTransaction(session -> {
            assertPlan(session, metadata, ruleApplication.statsProvider, new Plan(actual, types, StatsAndCosts.empty()), ruleApplication.lookup, pattern);
            return null;
        });
    }

    // Sets up memo/lookup plumbing and applies the rule inside a transaction.
    private RuleApplication applyRule()
    {
        SymbolAllocator symbolAllocator = new SymbolAllocator(types.allTypes());
        Memo memo = new Memo(idAllocator, plan);
        Lookup lookup = Lookup.from(planNode -> Stream.of(memo.resolve(planNode)));
        PlanNode memoRoot = memo.getNode(memo.getRootGroup());
        return inTransaction(session -> applyRule(rule, memoRoot, ruleContext(statsCalculator, costCalculator, symbolAllocator, memo, lookup, session)));
    }

    // Matches the rule's pattern against the node; returns an empty result when the
    // rule is disabled for the session or the pattern does not match.
    private static <T> RuleApplication applyRule(Rule<T> rule, PlanNode planNode, Rule.Context context)
    {
        Capture<T> planNodeCapture = newCapture();
        Pattern<T> pattern = rule.getPattern().capturedAs(planNodeCapture);
        Optional<Match> match = pattern.match(planNode, context.getLookup())
                .collect(toOptional());
        Rule.Result result;
        if (!rule.isEnabled(context.getSession()) || match.isEmpty()) {
            result = Rule.Result.empty();
        }
        else {
            result = rule.apply(match.get().capture(planNodeCapture), match.get().captures(), context);
        }
        return new RuleApplication(context.getLookup(), context.getStatsProvider(), context.getSymbolAllocator().getTypes(), result);
    }

    // Renders a plan (with fresh stats/costs) for failure messages.
    private String formatPlan(PlanNode plan, TypeProvider types)
    {
        StatsProvider statsProvider = new CachingStatsProvider(statsCalculator, session, types);
        CostProvider costProvider = new CachingCostProvider(costCalculator, statsProvider, session, types);
        return inTransaction(session -> textLogicalPlan(plan, types, metadata, StatsAndCosts.create(plan, statsProvider, costProvider), session, 2, false));
    }

    // Runs the given action in a single-statement transaction on this assert's session.
    private <T> T inTransaction(Function<Session, T> transactionSessionConsumer)
    {
        return transaction(transactionManager, accessControl)
                .singleStatement()
                .execute(session, session -> {
                    // metadata.getCatalogHandle() registers the catalog for the transaction
                    session.getCatalog().ifPresent(catalog -> metadata.getCatalogHandle(session, catalog));
                    return transactionSessionConsumer.apply(session);
                });
    }

    // Builds the Rule.Context handed to the rule under test, backed by caching
    // stats/cost providers over the memo.
    private Rule.Context ruleContext(StatsCalculator statsCalculator, CostCalculator costCalculator, SymbolAllocator symbolAllocator, Memo memo, Lookup lookup, Session session)
    {
        StatsProvider statsProvider = new CachingStatsProvider(statsCalculator, Optional.of(memo), lookup, session, symbolAllocator.getTypes());
        CostProvider costProvider = new CachingCostProvider(costCalculator, statsProvider, Optional.of(memo), session, symbolAllocator.getTypes());
        return new Rule.Context()
        {
            @Override
            public Lookup getLookup()
            {
                return lookup;
            }

            @Override
            public PlanNodeIdAllocator getIdAllocator()
            {
                return idAllocator;
            }

            @Override
            public SymbolAllocator getSymbolAllocator()
            {
                return symbolAllocator;
            }

            @Override
            public Session getSession()
            {
                return session;
            }

            @Override
            public StatsProvider getStatsProvider()
            {
                return statsProvider;
            }

            @Override
            public CostProvider getCostProvider()
            {
                return costProvider;
            }

            @Override
            public void checkTimeoutNotExhausted() {}

            @Override
            public WarningCollector getWarningCollector()
            {
                return WarningCollector.NOOP;
            }
        };
    }

    // Immutable record of one rule application attempt and the context it ran with.
    private static class RuleApplication
    {
        private final Lookup lookup;
        private final StatsProvider statsProvider;
        private final TypeProvider types;
        private final Rule.Result result;

        public RuleApplication(Lookup lookup, StatsProvider statsProvider, TypeProvider types, Rule.Result result)
        {
            this.lookup = requireNonNull(lookup, "lookup is null");
            this.statsProvider = requireNonNull(statsProvider, "statsProvider is null");
            this.types = requireNonNull(types, "types is null");
            this.result = requireNonNull(result, "result is null");
        }

        private boolean wasRuleApplied()
        {
            return !result.isEmpty();
        }

        public PlanNode getTransformedPlan()
        {
            return result.getTransformedPlan().orElseThrow(() -> new IllegalStateException("Rule did not produce transformed plan"));
        }
    }

    // Delegating stats calculator that lets tests pin stats for specific node ids.
    private static class TestingStatsCalculator
            implements StatsCalculator
    {
        private final StatsCalculator delegate;
        private final Map<PlanNodeId, PlanNodeStatsEstimate> stats = new HashMap<>();

        TestingStatsCalculator(StatsCalculator delegate)
        {
            this.delegate = requireNonNull(delegate, "delegate is null");
        }

        @Override
        public PlanNodeStatsEstimate calculateStats(PlanNode node, StatsProvider sourceStats, Lookup lookup, Session session, TypeProvider types)
        {
            // Overridden stats win; otherwise fall through to the real calculator.
            if (stats.containsKey(node.getId())) {
                return stats.get(node.getId());
            }
            return delegate.calculateStats(node, sourceStats, lookup, session, types);
        }

        public void setNodeStats(PlanNodeId nodeId, PlanNodeStatsEstimate nodeStats)
        {
            stats.put(nodeId, nodeStats);
        }
    }
}
| apache-2.0 |
mingfly/opencloudb | src/main/java/org/opencloudb/paser/ast/expression/function/cast/Convert.java | 1795 | /*
* Copyright 2012-2015 org.opencloudb.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* (created at 2011-1-23)
*/
package org.opencloudb.paser.ast.expression.function.cast;
import java.util.List;
import org.opencloudb.paser.ast.expression.Expression;
import org.opencloudb.paser.ast.expression.function.FunctionExpression;
import org.opencloudb.paser.visitor.SQLASTVisitor;
/**
* @author mycat
*/
public class Convert extends FunctionExpression {
/**
* Either {@link transcodeName} or {@link typeName} is null
*/
private final String transcodeName;
public Convert(Expression arg, String transcodeName) {
super("CONVERT", wrapList(arg));
if (null == transcodeName) {
throw new IllegalArgumentException("transcodeName is null");
}
this.transcodeName = transcodeName;
}
public String getTranscodeName() {
return transcodeName;
}
@Override
public FunctionExpression constructFunction(List<Expression> arguments) {
throw new UnsupportedOperationException("function of char has special arguments");
}
@Override
public void accept(SQLASTVisitor visitor) {
visitor.visit(this);
}
} | apache-2.0 |
davidwilliams1978/camel | components/camel-http/src/main/java/org/apache/camel/component/http/HttpComponent.java | 17760 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.http;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import org.apache.camel.Endpoint;
import org.apache.camel.ResolveEndpointFailedException;
import org.apache.camel.http.common.HttpBinding;
import org.apache.camel.http.common.HttpCommonComponent;
import org.apache.camel.http.common.HttpConfiguration;
import org.apache.camel.http.common.UrlRewrite;
import org.apache.camel.spi.HeaderFilterStrategy;
import org.apache.camel.util.CollectionHelper;
import org.apache.camel.util.IntrospectionSupport;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.URISupport;
import org.apache.camel.util.UnsafeUriCharactersEncoder;
import org.apache.commons.httpclient.HttpConnectionManager;
import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager;
import org.apache.commons.httpclient.params.HttpClientParams;
import org.apache.commons.httpclient.params.HttpConnectionManagerParams;
/**
* The <a href="http://camel.apache.org/http.html">HTTP Component</a>
*
* @version
*/
public class HttpComponent extends HttpCommonComponent {
protected HttpClientConfigurer httpClientConfigurer;
protected HttpConnectionManager httpConnectionManager;
    /** Creates the component using the default {@link HttpEndpoint} type. */
    public HttpComponent() {
        super(HttpEndpoint.class);
    }
    /**
     * Creates the component using a custom endpoint type, for subclasses that
     * need to produce a specialized {@link HttpEndpoint}.
     */
    public HttpComponent(Class<? extends HttpEndpoint> endpointClass) {
        super(endpointClass);
    }
    /**
     * Creates the {@link HttpClientConfigurer} based on the given parameters.
     * Endpoint-level options take precedence over component-level configuration.
     *
     * <p>Note: consumed options are removed from the {@code parameters} map as a
     * side effect, so the removal order below matters for later endpoint creation.
     *
     * @param parameters the map of endpoint parameters (mutated by this method)
     * @param authMethods out-parameter collecting every {@link AuthMethod} that
     *                    ends up being configured (endpoint or proxy)
     * @return the configurer
     */
    protected HttpClientConfigurer createHttpClientConfigurer(Map<String, Object> parameters, Set<AuthMethod> authMethods) {
        // prefer to use endpoint configured over component configured
        // TODO cmueller: remove the "httpClientConfigurerRef" look up in Camel 3.0
        HttpClientConfigurer configurer = resolveAndRemoveReferenceParameter(parameters, "httpClientConfigurerRef", HttpClientConfigurer.class);
        if (configurer == null) {
            // try without ref
            configurer = resolveAndRemoveReferenceParameter(parameters, "httpClientConfigurer", HttpClientConfigurer.class);
        }
        if (configurer == null) {
            // fallback to component configured
            configurer = getHttpClientConfigurer();
        }
        // authentication can be endpoint configured
        String authUsername = getAndRemoveParameter(parameters, "authUsername", String.class);
        String authMethod = getAndRemoveParameter(parameters, "authMethod", String.class);
        // validate that if auth username is given then the auth method is also provided
        if (authUsername != null && authMethod == null) {
            throw new IllegalArgumentException("Option authMethod must be provided to use authentication");
        }
        if (authMethod != null) {
            String authPassword = getAndRemoveParameter(parameters, "authPassword", String.class);
            String authDomain = getAndRemoveParameter(parameters, "authDomain", String.class);
            String authHost = getAndRemoveParameter(parameters, "authHost", String.class);
            configurer = configureAuth(configurer, authMethod, authUsername, authPassword, authDomain, authHost, authMethods);
        } else if (httpConfiguration != null) {
            // or fallback to use component configuration
            configurer = configureAuth(configurer, httpConfiguration.getAuthMethod(), httpConfiguration.getAuthUsername(),
                    httpConfiguration.getAuthPassword(), httpConfiguration.getAuthDomain(), httpConfiguration.getAuthHost(), authMethods);
        }
        // proxy authentication can be endpoint configured
        String proxyAuthUsername = getAndRemoveParameter(parameters, "proxyAuthUsername", String.class);
        String proxyAuthMethod = getAndRemoveParameter(parameters, "proxyAuthMethod", String.class);
        // validate that if proxy auth username is given then the proxy auth method is also provided
        if (proxyAuthUsername != null && proxyAuthMethod == null) {
            throw new IllegalArgumentException("Option proxyAuthMethod must be provided to use proxy authentication");
        }
        if (proxyAuthMethod != null) {
            String proxyAuthPassword = getAndRemoveParameter(parameters, "proxyAuthPassword", String.class);
            String proxyAuthDomain = getAndRemoveParameter(parameters, "proxyAuthDomain", String.class);
            String proxyAuthHost = getAndRemoveParameter(parameters, "proxyAuthHost", String.class);
            configurer = configureProxyAuth(configurer, proxyAuthMethod, proxyAuthUsername, proxyAuthPassword, proxyAuthDomain, proxyAuthHost, authMethods);
        } else if (httpConfiguration != null) {
            // or fallback to use component configuration
            configurer = configureProxyAuth(configurer, httpConfiguration.getProxyAuthMethod(), httpConfiguration.getProxyAuthUsername(),
                    httpConfiguration.getProxyAuthPassword(), httpConfiguration.getProxyAuthDomain(), httpConfiguration.getProxyAuthHost(), authMethods);
        }
        return configurer;
    }
/**
* Configures the authentication method to be used
*
* @return configurer to used
*/
protected HttpClientConfigurer configureAuth(HttpClientConfigurer configurer, String authMethod, String username,
String password, String domain, String host, Set<AuthMethod> authMethods) {
// no auth is in use
if (username == null && authMethod == null) {
return configurer;
}
// validate mandatory options given
if (username != null && authMethod == null) {
throw new IllegalArgumentException("Option authMethod must be provided to use authentication");
}
ObjectHelper.notNull(authMethod, "authMethod");
ObjectHelper.notNull(username, "authUsername");
ObjectHelper.notNull(password, "authPassword");
AuthMethod auth = getCamelContext().getTypeConverter().convertTo(AuthMethod.class, authMethod);
// add it as a auth method used
authMethods.add(auth);
if (auth == AuthMethod.Basic || auth == AuthMethod.Digest) {
return CompositeHttpConfigurer.combineConfigurers(configurer,
new BasicAuthenticationHttpClientConfigurer(false, username, password));
} else if (auth == AuthMethod.NTLM) {
// domain is mandatory for NTLM
ObjectHelper.notNull(domain, "authDomain");
return CompositeHttpConfigurer.combineConfigurers(configurer,
new NTLMAuthenticationHttpClientConfigurer(false, username, password, domain, host));
}
throw new IllegalArgumentException("Unknown authMethod " + authMethod);
}
/**
* Configures the proxy authentication method to be used
*
* @return configurer to used
*/
protected HttpClientConfigurer configureProxyAuth(HttpClientConfigurer configurer, String authMethod, String username,
String password, String domain, String host, Set<AuthMethod> authMethods) {
// no proxy auth is in use
if (username == null && authMethod == null) {
return configurer;
}
// validate mandatory options given
if (username != null && authMethod == null) {
throw new IllegalArgumentException("Option proxyAuthMethod must be provided to use proxy authentication");
}
ObjectHelper.notNull(authMethod, "proxyAuthMethod");
ObjectHelper.notNull(username, "proxyAuthUsername");
ObjectHelper.notNull(password, "proxyAuthPassword");
AuthMethod auth = getCamelContext().getTypeConverter().convertTo(AuthMethod.class, authMethod);
// add it as a auth method used
authMethods.add(auth);
if (auth == AuthMethod.Basic || auth == AuthMethod.Digest) {
return CompositeHttpConfigurer.combineConfigurers(configurer,
new BasicAuthenticationHttpClientConfigurer(true, username, password));
} else if (auth == AuthMethod.NTLM) {
// domain is mandatory for NTML
ObjectHelper.notNull(domain, "proxyAuthDomain");
return CompositeHttpConfigurer.combineConfigurers(configurer,
new NTLMAuthenticationHttpClientConfigurer(true, username, password, domain, host));
}
throw new IllegalArgumentException("Unknown proxyAuthMethod " + authMethod);
}
@Override
protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
String addressUri = "http://" + remaining;
if (uri.startsWith("https:")) {
addressUri = "https://" + remaining;
}
Map<String, Object> httpClientParameters = new HashMap<String, Object>(parameters);
// must extract well known parameters before we create the endpoint
// TODO cmueller: remove the "httpBindingRef" look up in Camel 3.0
HttpBinding binding = resolveAndRemoveReferenceParameter(parameters, "httpBindingRef", HttpBinding.class);
if (binding == null) {
// try without ref
binding = resolveAndRemoveReferenceParameter(parameters, "httpBinding", HttpBinding.class);
}
String proxyHost = getAndRemoveParameter(parameters, "proxyHost", String.class);
Integer proxyPort = getAndRemoveParameter(parameters, "proxyPort", Integer.class);
String authMethodPriority = getAndRemoveParameter(parameters, "authMethodPriority", String.class);
HeaderFilterStrategy headerFilterStrategy = resolveAndRemoveReferenceParameter(parameters, "headerFilterStrategy", HeaderFilterStrategy.class);
UrlRewrite urlRewrite = resolveAndRemoveReferenceParameter(parameters, "urlRewrite", UrlRewrite.class);
// http client can be configured from URI options
HttpClientParams clientParams = new HttpClientParams();
IntrospectionSupport.setProperties(clientParams, parameters, "httpClient.");
// validate that we could resolve all httpClient. parameters as this component is lenient
validateParameters(uri, parameters, "httpClient.");
// http client can be configured from URI options
HttpConnectionManagerParams connectionManagerParams = new HttpConnectionManagerParams();
// setup the httpConnectionManagerParams
IntrospectionSupport.setProperties(connectionManagerParams, parameters, "httpConnectionManager.");
validateParameters(uri, parameters, "httpConnectionManager.");
// make sure the component httpConnectionManager is take effect
HttpConnectionManager thisHttpConnectionManager = httpConnectionManager;
if (thisHttpConnectionManager == null) {
// only set the params on the new created http connection manager
thisHttpConnectionManager = new MultiThreadedHttpConnectionManager();
thisHttpConnectionManager.setParams(connectionManagerParams);
}
// create the configurer to use for this endpoint (authMethods contains the used methods created by the configurer)
final Set<AuthMethod> authMethods = new LinkedHashSet<AuthMethod>();
HttpClientConfigurer configurer = createHttpClientConfigurer(parameters, authMethods);
addressUri = UnsafeUriCharactersEncoder.encodeHttpURI(addressUri);
URI endpointUri = URISupport.createRemainingURI(new URI(addressUri), httpClientParameters);
// create the endpoint and connectionManagerParams already be set
HttpEndpoint endpoint = createHttpEndpoint(endpointUri.toString(), this, clientParams, thisHttpConnectionManager, configurer);
if (headerFilterStrategy != null) {
endpoint.setHeaderFilterStrategy(headerFilterStrategy);
} else {
setEndpointHeaderFilterStrategy(endpoint);
}
if (urlRewrite != null) {
// let CamelContext deal with the lifecycle of the url rewrite
// this ensures its being shutdown when Camel shutdown etc.
getCamelContext().addService(urlRewrite);
endpoint.setUrlRewrite(urlRewrite);
}
// prefer to use endpoint configured over component configured
if (binding == null) {
// fallback to component configured
binding = getHttpBinding();
}
if (binding != null) {
endpoint.setBinding(binding);
}
if (proxyHost != null) {
endpoint.setProxyHost(proxyHost);
endpoint.setProxyPort(proxyPort);
} else if (httpConfiguration != null) {
endpoint.setProxyHost(httpConfiguration.getProxyHost());
endpoint.setProxyPort(httpConfiguration.getProxyPort());
}
if (authMethodPriority != null) {
endpoint.setAuthMethodPriority(authMethodPriority);
} else if (httpConfiguration != null && httpConfiguration.getAuthMethodPriority() != null) {
endpoint.setAuthMethodPriority(httpConfiguration.getAuthMethodPriority());
} else {
// no explicit auth method priority configured, so use convention over configuration
// and set priority based on auth method
if (!authMethods.isEmpty()) {
authMethodPriority = CollectionHelper.collectionAsCommaDelimitedString(authMethods);
endpoint.setAuthMethodPriority(authMethodPriority);
}
}
setProperties(endpoint, parameters);
// restructure uri to be based on the parameters left as we dont want to include the Camel internal options
URI httpUri = URISupport.createRemainingURI(new URI(addressUri), parameters);
// validate http uri that end-user did not duplicate the http part that can be a common error
String part = httpUri.getSchemeSpecificPart();
if (part != null) {
part = part.toLowerCase();
if (part.startsWith("//http//") || part.startsWith("//https//") || part.startsWith("//http://") || part.startsWith("//https://")) {
throw new ResolveEndpointFailedException(uri,
"The uri part is not configured correctly. You have duplicated the http(s) protocol.");
}
}
endpoint.setHttpUri(httpUri);
return endpoint;
}
/**
 * Factory hook that creates the actual {@link HttpEndpoint} instance.
 * Subclasses can override this to return a specialized endpoint type while
 * reusing the surrounding endpoint-creation logic.
 *
 * @param uri               the fully resolved endpoint URI
 * @param component         the owning component
 * @param clientParams      HttpClient parameters already populated from URI options
 * @param connectionManager the connection manager the endpoint should use
 * @param configurer        configurer applied to the HttpClient (may carry auth setup)
 * @return the newly created endpoint
 * @throws URISyntaxException if the URI is malformed
 */
protected HttpEndpoint createHttpEndpoint(String uri, HttpComponent component, HttpClientParams clientParams,
HttpConnectionManager connectionManager, HttpClientConfigurer configurer) throws URISyntaxException {
return new HttpEndpoint(uri, component, clientParams, connectionManager, configurer);
}
/**
 * @return the component-level {@link HttpClientConfigurer}, or {@code null} if none was set
 */
public HttpClientConfigurer getHttpClientConfigurer() {
return httpClientConfigurer;
}
/**
 * To use a custom {@link HttpClientConfigurer} to perform configuration of the HttpClient that will be used.
 */
public void setHttpClientConfigurer(HttpClientConfigurer httpClientConfigurer) {
this.httpClientConfigurer = httpClientConfigurer;
}
/**
 * @return the component-level {@link HttpConnectionManager}, or {@code null} if none was set
 */
public HttpConnectionManager getHttpConnectionManager() {
return httpConnectionManager;
}
/**
 * To use a custom {@link HttpConnectionManager} to manage connections.
 * When not set, a new manager is created per endpoint.
 */
public void setHttpConnectionManager(HttpConnectionManager httpConnectionManager) {
this.httpConnectionManager = httpConnectionManager;
}
/**
 * To use a custom {@link HttpBinding} to control the mapping between Camel message and HttpClient.
 */
@Override
public void setHttpBinding(HttpBinding httpBinding) {
// need to override and call super for component docs
super.setHttpBinding(httpBinding);
}
/**
 * To use the shared {@link HttpConfiguration} as base configuration
 * (e.g. proxy host/port and auth method priority fallbacks).
 */
@Override
public void setHttpConfiguration(HttpConfiguration httpConfiguration) {
// need to override and call super for component docs
super.setHttpConfiguration(httpConfiguration);
}
/**
 * Whether to allow java serialization when a request uses content-type=application/x-java-serialized-object
 * <p/>
 * This is by default turned off. If you enable this then be aware that Java will deserialize the incoming
 * data from the request to Java and that can be a potential security risk.
 */
@Override
public void setAllowJavaSerializedObject(boolean allowJavaSerializedObject) {
// need to override and call super for component docs
super.setAllowJavaSerializedObject(allowJavaSerializedObject);
}
}
| apache-2.0 |
shun634501730/java_source_cn | src_en/com/sun/org/apache/xml/internal/dtm/ref/EmptyIterator.java | 1933 | /*
* Copyright (c) 2007, 2016, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
/*
* Copyright 1999-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* $Id: EmptyIterator.java,v 1.2.4.1 2005/09/15 08:15:05 suresh_emailid Exp $
*/
package com.sun.org.apache.xml.internal.dtm.ref;
import com.sun.org.apache.xml.internal.dtm.DTMAxisIterator;
import com.sun.org.apache.xml.internal.dtm.DTM;
/**
 * DTM axis iterator over the empty node-set.
 *
 * <p>The iterator carries no state, so a single shared instance (obtained via
 * {@link #getInstance()}) can safely be handed to any number of callers. Every
 * navigation method immediately reports that the iteration is exhausted, and
 * every mutator is a no-op, returning {@code this} where a result is required.
 */
public final class EmptyIterator implements DTMAxisIterator
{
  /** The one shared instance; safe to share because the class is stateless. */
  private static final EmptyIterator INSTANCE = new EmptyIterator();

  /** Not instantiable from outside; use {@link #getInstance()}. */
  private EmptyIterator()
  {
  }

  /**
   * @return the shared empty-iterator instance
   */
  public static DTMAxisIterator getInstance()
  {
    return INSTANCE;
  }

  /** @return always {@code END}; there are no nodes to visit. */
  public final int next()
  {
    return END;
  }

  /** Resetting an empty iteration changes nothing. */
  public final DTMAxisIterator reset()
  {
    return this;
  }

  /** @return 0, the size of the empty node-set. */
  public final int getLast()
  {
    return 0;
  }

  /** @return 1, the conventional position for an empty iteration. */
  public final int getPosition()
  {
    return 1;
  }

  /** No position to remember; does nothing. */
  public final void setMark()
  {
  }

  /** No position to restore; does nothing. */
  public final void gotoMark()
  {
  }

  /** The start node is irrelevant for an empty iteration; ignored. */
  public final DTMAxisIterator setStartNode(int node)
  {
    return this;
  }

  /** @return always {@code END}; no start node is tracked. */
  public final int getStartNode()
  {
    return END;
  }

  /** @return {@code false}; direction has no meaning for an empty set. */
  public final boolean isReverse()
  {
    return false;
  }

  /** Cloning an immutable object is unnecessary; returns {@code this}. */
  public final DTMAxisIterator cloneIterator()
  {
    return this;
  }

  /** Restartability is irrelevant for an empty iteration; ignored. */
  public final void setRestartable(boolean isRestartable)
  {
  }

  /** @return always {@code END}; no node exists at any position. */
  public final int getNodeByPosition(int position)
  {
    return END;
  }
}
| apache-2.0 |
lasombra/rhiot | cloudplatform/service/device/src/main/java/io/rhiot/cloudplatform/service/device/spring/DeviceServiceConfiguration.java | 2125 | /**
* Licensed to the Rhiot under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.rhiot.cloudplatform.service.device.spring;
import com.mongodb.Mongo;
import io.rhiot.cloudplatform.encoding.spi.PayloadEncoding;
import io.rhiot.cloudplatform.service.binding.ServiceBinding;
import io.rhiot.cloudplatform.service.device.MongoDbDeviceRegistry;
import io.rhiot.cloudplatform.service.device.api.DeviceRegistry;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class DeviceServiceConfiguration {
@ConditionalOnMissingBean(name = "deviceServiceBinding")
@Bean
ServiceBinding deviceServiceBinding(PayloadEncoding payloadEncoding) {
return new ServiceBinding(payloadEncoding, "device");
}
@ConditionalOnMissingBean
@Bean(name = "device")
DeviceRegistry deviceRegistry(Mongo mongo,
@Value("${device.metrics.mongodb.db:rhiot}") String db,
@Value("${device.metrics.mongodb.db:device}") String collection,
@Value("${disconnectionPeriod:60000}") long disconnectionPeriod) {
return new MongoDbDeviceRegistry(mongo, db, collection, disconnectionPeriod);
}
} | apache-2.0 |
objectiser/camel | components/camel-spring-boot/src/main/java/org/apache/camel/spring/boot/model/RouteDetails.java | 6707 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.spring.boot.model;
import java.util.Date;
import com.fasterxml.jackson.annotation.JsonInclude;
import org.apache.camel.api.management.mbean.ManagedRouteMBean;
import org.apache.camel.api.management.mbean.RouteError;
/**
 * Immutable snapshot of the runtime statistics of a single Camel route,
 * captured from a {@link ManagedRouteMBean} at construction time.
 *
 * <p>Empty values (for example null timestamps or exchange ids when a route has
 * never completed or failed) are omitted from the JSON output because of
 * {@code JsonInclude.Include.NON_EMPTY}.
 */
@JsonInclude(JsonInclude.Include.NON_EMPTY)
public class RouteDetails {
private long deltaProcessingTime;
private long exchangesInflight;
private long exchangesTotal;
private long externalRedeliveries;
private long failuresHandled;
private String firstExchangeCompletedExchangeId;
private Date firstExchangeCompletedTimestamp;
private String firstExchangeFailureExchangeId;
private Date firstExchangeFailureTimestamp;
private String lastExchangeCompletedExchangeId;
private Date lastExchangeCompletedTimestamp;
private String lastExchangeFailureExchangeId;
private Date lastExchangeFailureTimestamp;
private long lastProcessingTime;
private String load01;
private String load05;
private String load15;
private long maxProcessingTime;
private long meanProcessingTime;
private long minProcessingTime;
private Long oldestInflightDuration;
private String oldestInflightExchangeId;
private long redeliveries;
private long totalProcessingTime;
private RouteError lastError;
private boolean hasRouteController;
/**
 * Copies all statistics from the given MBean in one pass. Any exception thrown
 * while reading the MBean is deliberately swallowed so that a misbehaving route
 * cannot break statistics reporting; the remaining fields are left at their
 * defaults in that case.
 *
 * @param managedRoute the JMX view of the route to snapshot
 */
public RouteDetails(ManagedRouteMBean managedRoute) {
try {
this.deltaProcessingTime = managedRoute.getDeltaProcessingTime();
this.exchangesInflight = managedRoute.getExchangesInflight();
this.exchangesTotal = managedRoute.getExchangesTotal();
this.externalRedeliveries = managedRoute.getExternalRedeliveries();
this.failuresHandled = managedRoute.getFailuresHandled();
this.firstExchangeCompletedExchangeId = managedRoute.getFirstExchangeCompletedExchangeId();
this.firstExchangeCompletedTimestamp = managedRoute.getFirstExchangeCompletedTimestamp();
this.firstExchangeFailureExchangeId = managedRoute.getFirstExchangeFailureExchangeId();
this.firstExchangeFailureTimestamp = managedRoute.getFirstExchangeFailureTimestamp();
this.lastExchangeCompletedExchangeId = managedRoute.getLastExchangeCompletedExchangeId();
this.lastExchangeCompletedTimestamp = managedRoute.getLastExchangeCompletedTimestamp();
this.lastExchangeFailureExchangeId = managedRoute.getLastExchangeFailureExchangeId();
this.lastExchangeFailureTimestamp = managedRoute.getLastExchangeFailureTimestamp();
this.lastProcessingTime = managedRoute.getLastProcessingTime();
this.load01 = managedRoute.getLoad01();
this.load05 = managedRoute.getLoad05();
this.load15 = managedRoute.getLoad15();
this.maxProcessingTime = managedRoute.getMaxProcessingTime();
this.meanProcessingTime = managedRoute.getMeanProcessingTime();
this.minProcessingTime = managedRoute.getMinProcessingTime();
this.oldestInflightDuration = managedRoute.getOldestInflightDuration();
this.oldestInflightExchangeId = managedRoute.getOldestInflightExchangeId();
this.redeliveries = managedRoute.getRedeliveries();
this.totalProcessingTime = managedRoute.getTotalProcessingTime();
this.lastError = managedRoute.getLastError();
this.hasRouteController = managedRoute.getHasRouteController();
} catch (Exception e) {
// Ignore
// Best-effort snapshot: a failed JMX read leaves the remaining fields unset.
}
}
// Plain accessors exposing the captured statistics; no recomputation happens here.
public long getDeltaProcessingTime() {
return deltaProcessingTime;
}
public long getExchangesInflight() {
return exchangesInflight;
}
public long getExchangesTotal() {
return exchangesTotal;
}
public long getExternalRedeliveries() {
return externalRedeliveries;
}
public long getFailuresHandled() {
return failuresHandled;
}
public String getFirstExchangeCompletedExchangeId() {
return firstExchangeCompletedExchangeId;
}
public Date getFirstExchangeCompletedTimestamp() {
return firstExchangeCompletedTimestamp;
}
public String getFirstExchangeFailureExchangeId() {
return firstExchangeFailureExchangeId;
}
public Date getFirstExchangeFailureTimestamp() {
return firstExchangeFailureTimestamp;
}
public String getLastExchangeCompletedExchangeId() {
return lastExchangeCompletedExchangeId;
}
public Date getLastExchangeCompletedTimestamp() {
return lastExchangeCompletedTimestamp;
}
public String getLastExchangeFailureExchangeId() {
return lastExchangeFailureExchangeId;
}
public Date getLastExchangeFailureTimestamp() {
return lastExchangeFailureTimestamp;
}
public long getLastProcessingTime() {
return lastProcessingTime;
}
public String getLoad01() {
return load01;
}
public String getLoad05() {
return load05;
}
public String getLoad15() {
return load15;
}
public long getMaxProcessingTime() {
return maxProcessingTime;
}
public long getMeanProcessingTime() {
return meanProcessingTime;
}
public long getMinProcessingTime() {
return minProcessingTime;
}
public Long getOldestInflightDuration() {
return oldestInflightDuration;
}
public String getOldestInflightExchangeId() {
return oldestInflightExchangeId;
}
public long getRedeliveries() {
return redeliveries;
}
public long getTotalProcessingTime() {
return totalProcessingTime;
}
public RouteError getLastError() {
return lastError;
}
public boolean getHasRouteController() {
return hasRouteController;
}
}
| apache-2.0 |
mdeinum/spring-boot | spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/web/reactive/function/client/ClientHttpConnectorConfiguration.java | 3867 | /*
* Copyright 2012-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.web.reactive.function.client;
import org.apache.hc.client5.http.impl.async.HttpAsyncClients;
import org.apache.hc.core5.http.nio.AsyncRequestProducer;
import org.eclipse.jetty.client.HttpClient;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import org.springframework.http.client.reactive.ClientHttpConnector;
import org.springframework.http.client.reactive.HttpComponentsClientHttpConnector;
import org.springframework.http.client.reactive.JettyClientHttpConnector;
import org.springframework.http.client.reactive.JettyResourceFactory;
import org.springframework.http.client.reactive.ReactorClientHttpConnector;
import org.springframework.http.client.reactive.ReactorResourceFactory;
/**
* Configuration classes for WebClient client connectors.
* <p>
* Those should be {@code @Import} in a regular auto-configuration class to guarantee
* their order of execution.
*
* @author Brian Clozel
*/
@Configuration(proxyBeanMethods = false)
class ClientHttpConnectorConfiguration {

// Each nested configuration contributes a ClientHttpConnector only when its HTTP
// client library is on the classpath and no connector bean has been user-defined.
// The enclosing auto-configuration imports these classes in order, so Reactor
// Netty wins over Jetty, which wins over Apache HttpClient 5.

@Configuration(proxyBeanMethods = false)
@ConditionalOnClass(reactor.netty.http.client.HttpClient.class)
@ConditionalOnMissingBean(ClientHttpConnector.class)
static class ReactorNetty {

// Resource factory manages the shared Reactor Netty event loop/connection
// provider; only created when the application has not supplied its own.
@Bean
@ConditionalOnMissingBean
ReactorResourceFactory reactorClientResourceFactory() {
return new ReactorResourceFactory();
}

@Bean
@Lazy
ReactorClientHttpConnector reactorClientHttpConnector(ReactorResourceFactory reactorResourceFactory,
ObjectProvider<ReactorNettyHttpClientMapper> mapperProvider) {
// Compose all user-provided mappers, in their declared order, into a single
// HttpClient customization; fall back to the identity mapping if none exist.
ReactorNettyHttpClientMapper mapper = mapperProvider.orderedStream()
.reduce((before, after) -> (client) -> after.configure(before.configure(client)))
.orElse((client) -> client);
return new ReactorClientHttpConnector(reactorResourceFactory, mapper::configure);
}

}

@Configuration(proxyBeanMethods = false)
@ConditionalOnClass(org.eclipse.jetty.reactive.client.ReactiveRequest.class)
@ConditionalOnMissingBean(ClientHttpConnector.class)
static class JettyClient {

// Manages Jetty's shared Executor/ByteBufferPool/Scheduler resources.
@Bean
@ConditionalOnMissingBean
JettyResourceFactory jettyClientResourceFactory() {
return new JettyResourceFactory();
}

@Bean
@Lazy
JettyClientHttpConnector jettyClientHttpConnector(JettyResourceFactory jettyResourceFactory) {
// Client-side TLS configuration; the connector wires the HttpClient to the
// shared Jetty resources supplied by the factory above.
SslContextFactory sslContextFactory = new SslContextFactory.Client();
HttpClient httpClient = new HttpClient(sslContextFactory);
return new JettyClientHttpConnector(httpClient, jettyResourceFactory);
}

}

@Configuration(proxyBeanMethods = false)
@ConditionalOnClass({ HttpAsyncClients.class, AsyncRequestProducer.class })
@ConditionalOnMissingBean(ClientHttpConnector.class)
static class HttpClient5 {

// Uses the connector's default async HttpClient 5 setup; no extra resources needed.
@Bean
@Lazy
HttpComponentsClientHttpConnector httpComponentsClientHttpConnector() {
return new HttpComponentsClientHttpConnector();
}

}

}
| apache-2.0 |
nishantmonu51/druid | indexing-service/src/main/java/org/apache/druid/indexing/common/SingleFileTaskReportFileWriter.java | 1920 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.indexing.common;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.java.util.common.FileUtils;
import org.apache.druid.java.util.common.logger.Logger;
import java.io.File;
import java.util.Map;
/**
 * {@link TaskReportFileWriter} that serializes all reports for a task into a single JSON file.
 *
 * <p>Write failures are logged rather than propagated, so a problem writing the report
 * never fails the task itself.
 */
public class SingleFileTaskReportFileWriter implements TaskReportFileWriter
{
  private static final Logger log = new Logger(SingleFileTaskReportFileWriter.class);

  /** Destination file for the serialized reports. */
  private final File reportsFile;

  /** Mapper used for serialization; injected via {@link #setObjectMapper} before {@link #write} is called. */
  private ObjectMapper objectMapper;

  public SingleFileTaskReportFileWriter(File reportsFile)
  {
    this.reportsFile = reportsFile;
  }

  @Override
  public void write(String taskId, Map<String, TaskReport> reports)
  {
    try {
      // Create the parent directory on demand; getParentFile() is null when the
      // path has no parent component, in which case nothing needs creating.
      final File reportsFileParent = reportsFile.getParentFile();
      if (reportsFileParent != null) {
        FileUtils.mkdirp(reportsFileParent);
      }
      objectMapper.writeValue(reportsFile, reports);
    }
    catch (Exception e) {
      // Best-effort: include the task id and target file so failures are diagnosable,
      // but do not rethrow (report writing must not fail the task).
      log.error(e, "Failed to write report for task[%s] to file[%s].", taskId, reportsFile);
    }
  }

  @Override
  public void setObjectMapper(ObjectMapper objectMapper)
  {
    this.objectMapper = objectMapper;
  }
}
| apache-2.0 |
gh351135612/presto | presto-main/src/main/java/com/facebook/presto/sql/planner/iterative/rule/MergeLimitWithTopN.java | 2095 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner.iterative.rule;
import com.facebook.presto.matching.Capture;
import com.facebook.presto.matching.Captures;
import com.facebook.presto.matching.Pattern;
import com.facebook.presto.sql.planner.iterative.Rule;
import com.facebook.presto.sql.planner.plan.LimitNode;
import com.facebook.presto.sql.planner.plan.TopNNode;
import static com.facebook.presto.matching.Capture.newCapture;
import static com.facebook.presto.sql.planner.plan.Patterns.limit;
import static com.facebook.presto.sql.planner.plan.Patterns.source;
import static com.facebook.presto.sql.planner.plan.Patterns.topN;
/**
 * Merges a Limit over a TopN into a single TopN node:
 * <pre>
 *   Limit(m) -> TopN(n, ordering)  =>  TopN(min(m, n), ordering)
 * </pre>
 * The resulting TopN is partial when the original limit was partial,
 * otherwise it is a single (final) TopN.
 */
public class MergeLimitWithTopN
        implements Rule<LimitNode>
{
    private static final Capture<TopNNode> CHILD = newCapture();

    // Match a LimitNode whose immediate source is a TopNNode, capturing the child.
    private static final Pattern<LimitNode> PATTERN = limit()
            .with(source().matching(topN().capturedAs(CHILD)));

    @Override
    public Pattern<LimitNode> getPattern()
    {
        return PATTERN;
    }

    @Override
    public Result apply(LimitNode parent, Captures captures, Context context)
    {
        TopNNode child = captures.get(CHILD);

        // The merged node can return at most as many rows as the stricter of the two.
        long mergedCount = Math.min(parent.getCount(), child.getCount());
        TopNNode.Step step = parent.isPartial() ? TopNNode.Step.PARTIAL : TopNNode.Step.SINGLE;

        TopNNode merged = new TopNNode(
                parent.getId(),
                child.getSource(),
                mergedCount,
                child.getOrderBy(),
                child.getOrderings(),
                step);
        return Result.ofPlanNode(merged);
    }
}
| apache-2.0 |
pdxrunner/geode | geode-lucene/src/test/java/org/apache/geode/cache/lucene/internal/xml/LuceneIndexXmlParserJUnitTest.java | 4967 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache.lucene.internal.xml;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.Map;
import java.util.Stack;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.AttributesImpl;
import org.apache.geode.internal.cache.extension.SimpleExtensionPoint;
import org.apache.geode.internal.cache.xmlcache.CacheCreation;
import org.apache.geode.internal.cache.xmlcache.RegionAttributesCreation;
import org.apache.geode.internal.cache.xmlcache.RegionCreation;
import org.apache.geode.internal.cache.xmlcache.XmlGeneratorUtils;
import org.apache.geode.test.junit.categories.LuceneTest;
/**
 * Unit tests for {@code LuceneXmlParser}: verifies that index/field XML elements
 * are parsed into a {@code LuceneIndexCreation} extension on the region.
 */
@Category({LuceneTest.class})
public class LuceneIndexXmlParserJUnitTest {
  private LuceneXmlParser parser;
  private RegionCreation rc;
  private Stack<Object> stack;

  @Before
  public void setUp() {
    this.parser = new LuceneXmlParser();
    CacheCreation cache = Mockito.mock(CacheCreation.class);
    RegionCreation regionCreation = Mockito.mock(RegionCreation.class);
    RegionAttributesCreation rac = Mockito.mock(RegionAttributesCreation.class);
    Mockito.when(regionCreation.getFullPath()).thenReturn("/region");
    Mockito.when(regionCreation.getAttributes()).thenReturn(rac);
    // Bug fix: wrap the mocked region itself. Previously this passed this.rc,
    // which was still null at this point (it is only assigned below), so the
    // extension point was built around null references.
    Mockito.when(regionCreation.getExtensionPoint())
        .thenReturn(new SimpleExtensionPoint(regionCreation, regionCreation));
    this.rc = regionCreation;
    this.stack = new Stack<>();
    stack.push(cache);
    stack.push(rc);
    this.parser.setStack(stack);
  }

  @After
  public void tearDown() {
    this.parser = null;
    this.rc = null;
    this.stack = null;
  }

  @Test
  public void generateWithFields() throws SAXException {
    AttributesImpl attrs = new AttributesImpl();
    XmlGeneratorUtils.addAttribute(attrs, LuceneXmlConstants.NAME, "index");
    this.parser.startElement(LuceneXmlConstants.NAMESPACE, LuceneXmlConstants.INDEX, null, attrs);
    addField("field1");
    addField("field2");
    addField("field3", KeywordAnalyzer.class.getName());
    this.parser.endElement(LuceneXmlConstants.NAMESPACE, LuceneXmlConstants.INDEX, null);
    assertEquals(this.rc, this.stack.peek());
    LuceneIndexCreation index =
        (LuceneIndexCreation) this.rc.getExtensionPoint().getExtensions().iterator().next();
    assertEquals("index", index.getName());
    assertArrayEquals(new String[] {"field1", "field2", "field3"}, index.getFieldNames());
    // Assert analyzers: only field3 has an explicit analyzer configured.
    Map<String, Analyzer> fieldAnalyzers = index.getFieldAnalyzers();
    assertEquals(1, fieldAnalyzers.size());
    assertTrue(fieldAnalyzers.containsKey("field3"));
    assertTrue(fieldAnalyzers.get("field3") instanceof KeywordAnalyzer);
  }

  @Test
  public void attemptInvalidAnalyzerClass() throws SAXException {
    AttributesImpl attrs = new AttributesImpl();
    XmlGeneratorUtils.addAttribute(attrs, LuceneXmlConstants.NAME, "index");
    this.parser.startElement(LuceneXmlConstants.NAMESPACE, LuceneXmlConstants.INDEX, null, attrs);
    try {
      addField("field", "some.invalid.class");
      fail("Should not have been able to add a field with an invalid analyzer class name");
    } catch (Exception expected) {
      // Expected: an unresolvable analyzer class name must be rejected by the parser.
    }
  }

  private void addField(String fieldName) throws SAXException {
    addField(fieldName, null);
  }

  /** Emits a complete field element, optionally carrying an analyzer attribute. */
  private void addField(String fieldName, String analyzerClassName) throws SAXException {
    AttributesImpl field = new AttributesImpl();
    XmlGeneratorUtils.addAttribute(field, LuceneXmlConstants.NAME, fieldName);
    if (analyzerClassName != null) {
      XmlGeneratorUtils.addAttribute(field, LuceneXmlConstants.ANALYZER, analyzerClassName);
    }
    this.parser.startElement(LuceneXmlConstants.NAMESPACE, LuceneXmlConstants.FIELD, null, field);
    this.parser.endElement(LuceneXmlConstants.NAMESPACE, LuceneXmlConstants.FIELD, null);
  }
}
| apache-2.0 |
titusfortner/selenium | java/test/org/openqa/selenium/grid/sessionmap/jdbc/JdbcBackedSessionMapTest.java | 4541 | // Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.grid.sessionmap.jdbc;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.openqa.selenium.ImmutableCapabilities;
import org.openqa.selenium.NoSuchSessionException;
import org.openqa.selenium.events.EventBus;
import org.openqa.selenium.events.local.GuavaEventBus;
import org.openqa.selenium.grid.data.Session;
import org.openqa.selenium.grid.sessionmap.SessionMap;
import org.openqa.selenium.remote.SessionId;
import org.openqa.selenium.remote.tracing.DefaultTestTracer;
import org.openqa.selenium.remote.tracing.Tracer;
import java.net.URI;
import java.net.URISyntaxException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import java.time.Instant;
import java.util.UUID;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
/**
 * Tests for {@code JdbcBackedSessionMap} against an in-memory HSQLDB database.
 */
public class JdbcBackedSessionMapTest {
  private static Connection connection;
  private static EventBus bus;
  private static final Tracer tracer = DefaultTestTracer.createTracer();

  @BeforeClass
  public static void createDB() throws SQLException {
    bus = new GuavaEventBus();
    connection = DriverManager.getConnection("jdbc:hsqldb:mem:testdb", "SA", "");
    // Close the Statement once the schema is in place (was previously leaked).
    try (Statement createStatement = connection.createStatement()) {
      createStatement.executeUpdate(
          "create table sessions_map (session_ids varchar(50), session_uri varchar(30), session_stereotype varchar(300), session_caps varchar(300), session_start varchar(128));");
    }
  }

  @AfterClass
  public static void killDBConnection() throws SQLException {
    connection.close();
  }

  @Test(expected = NoSuchSessionException.class)
  public void shouldThrowNoSuchSessionExceptionIfSessionDoesNotExists() {
    SessionMap sessions = getSessionMap();
    sessions.get(new SessionId(UUID.randomUUID()));
  }

  @Test(expected = IllegalArgumentException.class)
  public void shouldThrowIllegalArgumentExceptionIfConnectionObjectIsNull() {
    // The constructor itself is expected to reject the null connection.
    SessionMap sessions = new JdbcBackedSessionMap(tracer, null, bus);
  }

  @Test(expected = JdbcException.class)
  public void shouldThrowNoSuchSessionExceptionIfTableDoesNotExist() throws SQLException {
    // Separate in-memory database that lacks the sessions_map table.
    // try-with-resources closes the extra connection even though the lookup
    // throws (it was previously leaked).
    try (Connection connection2 = DriverManager.getConnection("jdbc:hsqldb:mem:testdb2", "SA", "")) {
      SessionMap sessions = new JdbcBackedSessionMap(tracer, connection2, bus);
      sessions.get(new SessionId(UUID.randomUUID()));
    }
  }

  @Test
  public void canCreateAJdbcBackedSessionMap() throws URISyntaxException {
    SessionMap sessions = getSessionMap();
    Session expected = new Session(
        new SessionId(UUID.randomUUID()),
        new URI("http://example.com/foo"),
        new ImmutableCapabilities("foo", "bar"),
        new ImmutableCapabilities("key", "value"),
        Instant.now());
    sessions.add(expected);

    // Read through a fresh map instance to prove the row round-trips via the DB.
    SessionMap reader = getSessionMap();
    Session seen = reader.get(expected.getId());
    assertThat(seen).isEqualTo(expected);
  }

  @Test
  public void shouldBeAbleToRemoveSessions() throws URISyntaxException {
    SessionMap sessions = getSessionMap();
    Session expected = new Session(
        new SessionId(UUID.randomUUID()),
        new URI("http://example.com/foo"),
        new ImmutableCapabilities("foo", "bar"),
        new ImmutableCapabilities("key", "value"),
        Instant.now());
    sessions.add(expected);

    SessionMap reader = getSessionMap();
    reader.remove(expected.getId());
    try {
      reader.get(expected.getId());
      fail("Oh noes!");
    } catch (NoSuchSessionException ignored) {
      // This is expected
    }
  }

  private JdbcBackedSessionMap getSessionMap() {
    return new JdbcBackedSessionMap(tracer, connection, bus);
  }
}
| apache-2.0 |
shun634501730/java_source_cn | src_en/javax/imageio/stream/ImageInputStream.java | 39262 | /*
* Copyright (c) 1999, 2013, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package javax.imageio.stream;
import java.io.Closeable;
import java.io.DataInput;
import java.io.IOException;
import java.nio.ByteOrder;
/**
* A seekable input stream interface for use by
* <code>ImageReader</code>s. Various input sources, such as
* <code>InputStream</code>s and <code>File</code>s,
* as well as future fast I/O sources may be "wrapped" by a suitable
* implementation of this interface for use by the Image I/O API.
*
* @see ImageInputStreamImpl
* @see FileImageInputStream
* @see FileCacheImageInputStream
* @see MemoryCacheImageInputStream
*
*/
public interface ImageInputStream extends DataInput, Closeable {
/**
* Sets the desired byte order for future reads of data values
* from this stream. For example, the sequence of bytes '0x01
* 0x02 0x03 0x04' if read as a 4-byte integer would have the
* value '0x01020304' using network byte order and the value
* '0x04030201' under the reverse byte order.
*
* <p> The enumeration class <code>java.nio.ByteOrder</code> is
* used to specify the byte order. A value of
* <code>ByteOrder.BIG_ENDIAN</code> specifies so-called
* big-endian or network byte order, in which the high-order byte
* comes first. Motorola and Sparc processors store data in this
* format, while Intel processors store data in the reverse
* <code>ByteOrder.LITTLE_ENDIAN</code> order.
*
* <p> The byte order has no effect on the results returned from
* the <code>readBits</code> method (or the value written by
* <code>ImageOutputStream.writeBits</code>).
*
* @param byteOrder one of <code>ByteOrder.BIG_ENDIAN</code> or
* <code>java.nio.ByteOrder.LITTLE_ENDIAN</code>, indicating whether
* network byte order or its reverse will be used for future
* reads.
*
* @see java.nio.ByteOrder
* @see #getByteOrder
* @see #readBits(int)
*/
void setByteOrder(ByteOrder byteOrder);
/**
* Returns the byte order with which data values will be read from
* this stream as an instance of the
* <code>java.nio.ByteOrder</code> enumeration.
*
* @return one of <code>ByteOrder.BIG_ENDIAN</code> or
* <code>ByteOrder.LITTLE_ENDIAN</code>, indicating which byte
* order is being used.
*
* @see java.nio.ByteOrder
* @see #setByteOrder
*/
ByteOrder getByteOrder();
/**
* Reads a single byte from the stream and returns it as an
* integer between 0 and 255. If the end of the stream is
* reached, -1 is returned.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @return a byte value from the stream, as an int, or -1 to
* indicate EOF.
*
* @exception IOException if an I/O error occurs.
*/
int read() throws IOException;
/**
* Reads up to <code>b.length</code> bytes from the stream, and
* stores them into <code>b</code> starting at index 0. The
* number of bytes read is returned. If no bytes can be read
* because the end of the stream has been reached, -1 is returned.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @param b an array of bytes to be written to.
*
* @return the number of bytes actually read, or <code>-1</code>
* to indicate EOF.
*
* @exception NullPointerException if <code>b</code> is
* <code>null</code>.
*
* @exception IOException if an I/O error occurs.
*/
int read(byte[] b) throws IOException;
/**
* Reads up to <code>len</code> bytes from the stream, and stores
* them into <code>b</code> starting at index <code>off</code>.
* The number of bytes read is returned. If no bytes can be read
* because the end of the stream has been reached, <code>-1</code>
* is returned.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @param b an array of bytes to be written to.
* @param off the starting position within <code>b</code> to write to.
* @param len the maximum number of <code>byte</code>s to read.
*
* @return the number of bytes actually read, or <code>-1</code>
* to indicate EOF.
*
* @exception NullPointerException if <code>b</code> is
* <code>null</code>.
* @exception IndexOutOfBoundsException if <code>off</code> is
* negative, <code>len</code> is negative, or <code>off +
* len</code> is greater than <code>b.length</code>.
* @exception IOException if an I/O error occurs.
*/
int read(byte[] b, int off, int len) throws IOException;
/**
* Reads up to <code>len</code> bytes from the stream, and
* modifies the supplied <code>IIOByteBuffer</code> to indicate
* the byte array, offset, and length where the data may be found.
* The caller should not attempt to modify the data found in the
* <code>IIOByteBuffer</code>.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @param buf an IIOByteBuffer object to be modified.
* @param len the maximum number of <code>byte</code>s to read.
*
* @exception IndexOutOfBoundsException if <code>len</code> is
* negative.
* @exception NullPointerException if <code>buf</code> is
* <code>null</code>.
*
* @exception IOException if an I/O error occurs.
*/
void readBytes(IIOByteBuffer buf, int len) throws IOException;
/**
* Reads a byte from the stream and returns a <code>boolean</code>
* value of <code>true</code> if it is nonzero, <code>false</code>
* if it is zero.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @return a boolean value from the stream.
*
* @exception java.io.EOFException if the end of the stream is reached.
* @exception IOException if an I/O error occurs.
*/
boolean readBoolean() throws IOException;
/**
* Reads a byte from the stream and returns it as a
* <code>byte</code> value. Byte values between <code>0x00</code>
* and <code>0x7f</code> represent integer values between
* <code>0</code> and <code>127</code>. Values between
* <code>0x80</code> and <code>0xff</code> represent negative
     * values from <code>-128</code> to <code>-1</code>.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @return a signed byte value from the stream.
*
* @exception java.io.EOFException if the end of the stream is reached.
* @exception IOException if an I/O error occurs.
*/
byte readByte() throws IOException;
/**
* Reads a byte from the stream, and (conceptually) converts it to
* an int, masks it with <code>0xff</code> in order to strip off
* any sign-extension bits, and returns it as a <code>byte</code>
* value.
*
* <p> Thus, byte values between <code>0x00</code> and
* <code>0x7f</code> are simply returned as integer values between
* <code>0</code> and <code>127</code>. Values between
* <code>0x80</code> and <code>0xff</code>, which normally
* represent negative <code>byte</code>values, will be mapped into
* positive integers between <code>128</code> and
* <code>255</code>.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @return an unsigned byte value from the stream.
*
* @exception java.io.EOFException if the end of the stream is reached.
* @exception IOException if an I/O error occurs.
*/
int readUnsignedByte() throws IOException;
/**
* Reads two bytes from the stream, and (conceptually)
* concatenates them according to the current byte order, and
* returns the result as a <code>short</code> value.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @return a signed short value from the stream.
*
* @exception java.io.EOFException if the stream reaches the end before
* reading all the bytes.
* @exception IOException if an I/O error occurs.
*
* @see #getByteOrder
*/
short readShort() throws IOException;
/**
* Reads two bytes from the stream, and (conceptually)
* concatenates them according to the current byte order, converts
* the resulting value to an <code>int</code>, masks it with
* <code>0xffff</code> in order to strip off any sign-extension
     * bits, and returns the result as an unsigned <code>int</code>
* value.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @return an unsigned short value from the stream, as an int.
*
* @exception java.io.EOFException if the stream reaches the end before
* reading all the bytes.
* @exception IOException if an I/O error occurs.
*
* @see #getByteOrder
*/
int readUnsignedShort() throws IOException;
/**
* Equivalent to <code>readUnsignedShort</code>, except that the
* result is returned using the <code>char</code> datatype.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @return an unsigned char value from the stream.
*
* @exception java.io.EOFException if the stream reaches the end before
* reading all the bytes.
* @exception IOException if an I/O error occurs.
*
* @see #readUnsignedShort
*/
char readChar() throws IOException;
/**
* Reads 4 bytes from the stream, and (conceptually) concatenates
* them according to the current byte order and returns the result
* as an <code>int</code>.
*
* <p> The bit offset within the stream is ignored and treated as
* though it were zero.
*
* @return a signed int value from the stream.
*
* @exception java.io.EOFException if the stream reaches the end before
* reading all the bytes.
* @exception IOException if an I/O error occurs.
*
* @see #getByteOrder
*/
int readInt() throws IOException;
/**
* Reads 4 bytes from the stream, and (conceptually) concatenates
* them according to the current byte order, converts the result
* to a long, masks it with <code>0xffffffffL</code> in order to
* strip off any sign-extension bits, and returns the result as an
* unsigned <code>long</code> value.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @return an unsigned int value from the stream, as a long.
*
* @exception java.io.EOFException if the stream reaches the end before
* reading all the bytes.
* @exception IOException if an I/O error occurs.
*
* @see #getByteOrder
*/
long readUnsignedInt() throws IOException;
/**
* Reads 8 bytes from the stream, and (conceptually) concatenates
* them according to the current byte order and returns the result
* as a <code>long</code>.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @return a signed long value from the stream.
*
* @exception java.io.EOFException if the stream reaches the end before
* reading all the bytes.
* @exception IOException if an I/O error occurs.
*
* @see #getByteOrder
*/
long readLong() throws IOException;
/**
* Reads 4 bytes from the stream, and (conceptually) concatenates
* them according to the current byte order and returns the result
* as a <code>float</code>.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @return a float value from the stream.
*
* @exception java.io.EOFException if the stream reaches the end before
* reading all the bytes.
* @exception IOException if an I/O error occurs.
*
* @see #getByteOrder
*/
float readFloat() throws IOException;
/**
* Reads 8 bytes from the stream, and (conceptually) concatenates
* them according to the current byte order and returns the result
* as a <code>double</code>.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @return a double value from the stream.
*
* @exception java.io.EOFException if the stream reaches the end before
* reading all the bytes.
* @exception IOException if an I/O error occurs.
*
* @see #getByteOrder
*/
double readDouble() throws IOException;
/**
* Reads the next line of text from the input stream. It reads
* successive bytes, converting each byte separately into a
* character, until it encounters a line terminator or end of
* file; the characters read are then returned as a
* <code>String</code>. Note that because this method processes
* bytes, it does not support input of the full Unicode character
* set.
*
* <p> If end of file is encountered before even one byte can be
* read, then <code>null</code> is returned. Otherwise, each byte
* that is read is converted to type <code>char</code> by
* zero-extension. If the character <code>'\n'</code> is
* encountered, it is discarded and reading ceases. If the
* character <code>'\r'</code> is encountered, it is discarded
     * and, if the following byte converts to the character
* <code>'\n'</code>, then that is discarded also; reading then
* ceases. If end of file is encountered before either of the
* characters <code>'\n'</code> and <code>'\r'</code> is
* encountered, reading ceases. Once reading has ceased, a
* <code>String</code> is returned that contains all the
* characters read and not discarded, taken in order. Note that
* every character in this string will have a value less than
* <code>\u0100</code>, that is, <code>(char)256</code>.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @return a String containing a line of text from the stream.
*
* @exception IOException if an I/O error occurs.
*/
String readLine() throws IOException;
/**
* Reads in a string that has been encoded using a
* <a href="../../../java/io/DataInput.html#modified-utf-8">modified
* UTF-8</a>
* format. The general contract of <code>readUTF</code> is that
* it reads a representation of a Unicode character string encoded
* in modified UTF-8 format; this string of characters is
* then returned as a <code>String</code>.
*
* <p> First, two bytes are read and used to construct an unsigned
* 16-bit integer in the manner of the
* <code>readUnsignedShort</code> method, using network byte order
* (regardless of the current byte order setting). This integer
* value is called the <i>UTF length</i> and specifies the number
* of additional bytes to be read. These bytes are then converted
* to characters by considering them in groups. The length of each
* group is computed from the value of the first byte of the
* group. The byte following a group, if any, is the first byte of
* the next group.
*
* <p> If the first byte of a group matches the bit pattern
* <code>0xxxxxxx</code> (where <code>x</code> means "may be
* <code>0</code> or <code>1</code>"), then the group consists of
* just that byte. The byte is zero-extended to form a character.
*
* <p> If the first byte of a group matches the bit pattern
* <code>110xxxxx</code>, then the group consists of that byte
* <code>a</code> and a second byte <code>b</code>. If there is no
* byte <code>b</code> (because byte <code>a</code> was the last
* of the bytes to be read), or if byte <code>b</code> does not
* match the bit pattern <code>10xxxxxx</code>, then a
* <code>UTFDataFormatException</code> is thrown. Otherwise, the
* group is converted to the character:
*
* <p> <pre><code>
     * (char)(((a & 0x1F) << 6) | (b & 0x3F))
* </code></pre>
*
* If the first byte of a group matches the bit pattern
* <code>1110xxxx</code>, then the group consists of that byte
* <code>a</code> and two more bytes <code>b</code> and
* <code>c</code>. If there is no byte <code>c</code> (because
* byte <code>a</code> was one of the last two of the bytes to be
* read), or either byte <code>b</code> or byte <code>c</code>
* does not match the bit pattern <code>10xxxxxx</code>, then a
* <code>UTFDataFormatException</code> is thrown. Otherwise, the
* group is converted to the character:
*
* <p> <pre><code>
* (char)(((a & 0x0F) << 12) | ((b & 0x3F) << 6) | (c & 0x3F))
* </code></pre>
*
* If the first byte of a group matches the pattern
* <code>1111xxxx</code> or the pattern <code>10xxxxxx</code>,
* then a <code>UTFDataFormatException</code> is thrown.
*
* <p> If end of file is encountered at any time during this
* entire process, then an <code>java.io.EOFException</code> is thrown.
*
* <p> After every group has been converted to a character by this
* process, the characters are gathered, in the same order in
* which their corresponding groups were read from the input
* stream, to form a <code>String</code>, which is returned.
*
* <p> The current byte order setting is ignored.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* <p><strong>Note:</strong> This method should not be used in
* the implementation of image formats that use standard UTF-8,
* because the modified UTF-8 used here is incompatible with
* standard UTF-8.
*
* @return a String read from the stream.
*
* @exception java.io.EOFException if this stream reaches the end
* before reading all the bytes.
* @exception java.io.UTFDataFormatException if the bytes do not represent
* a valid modified UTF-8 encoding of a string.
* @exception IOException if an I/O error occurs.
*/
String readUTF() throws IOException;
/**
* Reads <code>len</code> bytes from the stream, and stores them
* into <code>b</code> starting at index <code>off</code>.
* If the end of the stream is reached, an <code>java.io.EOFException</code>
* will be thrown.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @param b an array of bytes to be written to.
* @param off the starting position within <code>b</code> to write to.
* @param len the maximum number of <code>byte</code>s to read.
*
* @exception IndexOutOfBoundsException if <code>off</code> is
* negative, <code>len</code> is negative, or <code>off +
* len</code> is greater than <code>b.length</code>.
* @exception NullPointerException if <code>b</code> is
* <code>null</code>.
* @exception java.io.EOFException if the stream reaches the end before
* reading all the bytes.
* @exception IOException if an I/O error occurs.
*/
void readFully(byte[] b, int off, int len) throws IOException;
/**
* Reads <code>b.length</code> bytes from the stream, and stores them
* into <code>b</code> starting at index <code>0</code>.
* If the end of the stream is reached, an <code>java.io.EOFException</code>
* will be thrown.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @param b an array of <code>byte</code>s.
*
* @exception NullPointerException if <code>b</code> is
* <code>null</code>.
* @exception java.io.EOFException if the stream reaches the end before
* reading all the bytes.
* @exception IOException if an I/O error occurs.
*/
void readFully(byte[] b) throws IOException;
/**
* Reads <code>len</code> shorts (signed 16-bit integers) from the
* stream according to the current byte order, and
* stores them into <code>s</code> starting at index
* <code>off</code>. If the end of the stream is reached, an
* <code>java.io.EOFException</code> will be thrown.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @param s an array of shorts to be written to.
* @param off the starting position within <code>s</code> to write to.
* @param len the maximum number of <code>short</code>s to read.
*
* @exception IndexOutOfBoundsException if <code>off</code> is
* negative, <code>len</code> is negative, or <code>off +
* len</code> is greater than <code>s.length</code>.
* @exception NullPointerException if <code>s</code> is
* <code>null</code>.
* @exception java.io.EOFException if the stream reaches the end before
* reading all the bytes.
* @exception IOException if an I/O error occurs.
*/
void readFully(short[] s, int off, int len) throws IOException;
/**
* Reads <code>len</code> chars (unsigned 16-bit integers) from the
* stream according to the current byte order, and
* stores them into <code>c</code> starting at index
* <code>off</code>. If the end of the stream is reached, an
* <code>java.io.EOFException</code> will be thrown.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @param c an array of chars to be written to.
* @param off the starting position within <code>c</code> to write to.
* @param len the maximum number of <code>char</code>s to read.
*
* @exception IndexOutOfBoundsException if <code>off</code> is
* negative, <code>len</code> is negative, or <code>off +
* len</code> is greater than <code>c.length</code>.
* @exception NullPointerException if <code>c</code> is
* <code>null</code>.
* @exception java.io.EOFException if the stream reaches the end before
* reading all the bytes.
* @exception IOException if an I/O error occurs.
*/
void readFully(char[] c, int off, int len) throws IOException;
/**
* Reads <code>len</code> ints (signed 32-bit integers) from the
* stream according to the current byte order, and
* stores them into <code>i</code> starting at index
* <code>off</code>. If the end of the stream is reached, an
* <code>java.io.EOFException</code> will be thrown.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @param i an array of ints to be written to.
* @param off the starting position within <code>i</code> to write to.
* @param len the maximum number of <code>int</code>s to read.
*
* @exception IndexOutOfBoundsException if <code>off</code> is
* negative, <code>len</code> is negative, or <code>off +
* len</code> is greater than <code>i.length</code>.
* @exception NullPointerException if <code>i</code> is
* <code>null</code>.
* @exception java.io.EOFException if the stream reaches the end before
* reading all the bytes.
* @exception IOException if an I/O error occurs.
*/
void readFully(int[] i, int off, int len) throws IOException;
/**
* Reads <code>len</code> longs (signed 64-bit integers) from the
* stream according to the current byte order, and
* stores them into <code>l</code> starting at index
* <code>off</code>. If the end of the stream is reached, an
* <code>java.io.EOFException</code> will be thrown.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @param l an array of longs to be written to.
* @param off the starting position within <code>l</code> to write to.
* @param len the maximum number of <code>long</code>s to read.
*
* @exception IndexOutOfBoundsException if <code>off</code> is
* negative, <code>len</code> is negative, or <code>off +
* len</code> is greater than <code>l.length</code>.
* @exception NullPointerException if <code>l</code> is
* <code>null</code>.
* @exception java.io.EOFException if the stream reaches the end before
* reading all the bytes.
* @exception IOException if an I/O error occurs.
*/
void readFully(long[] l, int off, int len) throws IOException;
/**
* Reads <code>len</code> floats (32-bit IEEE single-precision
* floats) from the stream according to the current byte order,
* and stores them into <code>f</code> starting at
* index <code>off</code>. If the end of the stream is reached,
* an <code>java.io.EOFException</code> will be thrown.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @param f an array of floats to be written to.
* @param off the starting position within <code>f</code> to write to.
* @param len the maximum number of <code>float</code>s to read.
*
* @exception IndexOutOfBoundsException if <code>off</code> is
* negative, <code>len</code> is negative, or <code>off +
* len</code> is greater than <code>f.length</code>.
* @exception NullPointerException if <code>f</code> is
* <code>null</code>.
* @exception java.io.EOFException if the stream reaches the end before
* reading all the bytes.
* @exception IOException if an I/O error occurs.
*/
void readFully(float[] f, int off, int len) throws IOException;
/**
* Reads <code>len</code> doubles (64-bit IEEE double-precision
* floats) from the stream according to the current byte order,
* and stores them into <code>d</code> starting at
* index <code>off</code>. If the end of the stream is reached,
* an <code>java.io.EOFException</code> will be thrown.
*
* <p> The bit offset within the stream is reset to zero before
* the read occurs.
*
* @param d an array of doubles to be written to.
* @param off the starting position within <code>d</code> to write to.
* @param len the maximum number of <code>double</code>s to read.
*
* @exception IndexOutOfBoundsException if <code>off</code> is
* negative, <code>len</code> is negative, or <code>off +
* len</code> is greater than <code>d.length</code>.
* @exception NullPointerException if <code>d</code> is
* <code>null</code>.
* @exception java.io.EOFException if the stream reaches the end before
* reading all the bytes.
* @exception IOException if an I/O error occurs.
*/
void readFully(double[] d, int off, int len) throws IOException;
/**
* Returns the current byte position of the stream. The next read
* will take place starting at this offset.
*
* @return a long containing the position of the stream.
*
* @exception IOException if an I/O error occurs.
*/
long getStreamPosition() throws IOException;
/**
* Returns the current bit offset, as an integer between 0 and 7,
* inclusive. The bit offset is updated implicitly by calls to
* the <code>readBits</code> method. A value of 0 indicates the
* most-significant bit, and a value of 7 indicates the least
* significant bit, of the byte being read.
*
* <p> The bit offset is set to 0 when a stream is first
* opened, and is reset to 0 by calls to <code>seek</code>,
* <code>skipBytes</code>, or any <code>read</code> or
* <code>readFully</code> method.
*
* @return an <code>int</code> containing the bit offset between
* 0 and 7, inclusive.
*
* @exception IOException if an I/O error occurs.
*
* @see #setBitOffset
*/
int getBitOffset() throws IOException;
/**
* Sets the bit offset to an integer between 0 and 7, inclusive.
* The byte offset within the stream, as returned by
* <code>getStreamPosition</code>, is left unchanged.
* A value of 0 indicates the
* most-significant bit, and a value of 7 indicates the least
* significant bit, of the byte being read.
*
* @param bitOffset the desired offset, as an <code>int</code>
* between 0 and 7, inclusive.
*
* @exception IllegalArgumentException if <code>bitOffset</code>
* is not between 0 and 7, inclusive.
* @exception IOException if an I/O error occurs.
*
* @see #getBitOffset
*/
void setBitOffset(int bitOffset) throws IOException;
/**
* Reads a single bit from the stream and returns it as an
* <code>int</code> with the value <code>0</code> or
* <code>1</code>. The bit offset is advanced by one and reduced
* modulo 8.
*
* @return an <code>int</code> containing the value <code>0</code>
* or <code>1</code>.
*
* @exception java.io.EOFException if the stream reaches the end before
* reading all the bits.
* @exception IOException if an I/O error occurs.
*/
int readBit() throws IOException;
/**
* Reads a bitstring from the stream and returns it as a
* <code>long</code>, with the first bit read becoming the most
* significant bit of the output. The read starts within the byte
* indicated by <code>getStreamPosition</code>, at the bit given
* by <code>getBitOffset</code>. The bit offset is advanced by
* <code>numBits</code> and reduced modulo 8.
*
* <p> The byte order of the stream has no effect on this
* method. The return value of this method is constructed as
* though the bits were read one at a time, and shifted into
* the right side of the return value, as shown by the following
* pseudo-code:
*
* <pre>{@code
* long accum = 0L;
* for (int i = 0; i < numBits; i++) {
* accum <<= 1; // Shift left one bit to make room
* accum |= readBit();
* }
* }</pre>
*
     * Note that the result of <code>readBits(32)</code> may thus not
     * be equal to that of <code>readInt()</code> if a reverse network
     * byte order is being used (i.e., <code>getByteOrder() ==
     * ByteOrder.LITTLE_ENDIAN</code>).
*
* <p> If the end of the stream is encountered before all the bits
* have been read, an <code>java.io.EOFException</code> is thrown.
*
* @param numBits the number of bits to read, as an <code>int</code>
* between 0 and 64, inclusive.
* @return the bitstring, as a <code>long</code> with the last bit
* read stored in the least significant bit.
*
* @exception IllegalArgumentException if <code>numBits</code>
* is not between 0 and 64, inclusive.
* @exception java.io.EOFException if the stream reaches the end before
* reading all the bits.
* @exception IOException if an I/O error occurs.
*/
long readBits(int numBits) throws IOException;
/**
* Returns the total length of the stream, if known. Otherwise,
* <code>-1</code> is returned.
*
* @return a <code>long</code> containing the length of the
* stream, if known, or else <code>-1</code>.
*
* @exception IOException if an I/O error occurs.
*/
long length() throws IOException;
/**
* Moves the stream position forward by a given number of bytes. It
* is possible that this method will only be able to skip forward
* by a smaller number of bytes than requested, for example if the
* end of the stream is reached. In all cases, the actual number
* of bytes skipped is returned. The bit offset is set to zero
* prior to advancing the position.
*
* @param n an <code>int</code> containing the number of bytes to
* be skipped.
*
* @return an <code>int</code> representing the number of bytes skipped.
*
* @exception IOException if an I/O error occurs.
*/
int skipBytes(int n) throws IOException;
/**
* Moves the stream position forward by a given number of bytes.
* This method is identical to <code>skipBytes(int)</code> except
* that it allows for a larger skip distance.
*
* @param n a <code>long</code> containing the number of bytes to
* be skipped.
*
* @return a <code>long</code> representing the number of bytes
* skipped.
*
* @exception IOException if an I/O error occurs.
*/
long skipBytes(long n) throws IOException;
/**
* Sets the current stream position to the desired location. The
* next read will occur at this location. The bit offset is set
* to 0.
*
* <p> An <code>IndexOutOfBoundsException</code> will be thrown if
* <code>pos</code> is smaller than the flushed position (as
     * returned by <code>getFlushedPosition</code>).
*
* <p> It is legal to seek past the end of the file; an
* <code>java.io.EOFException</code> will be thrown only if a read is
* performed.
*
* @param pos a <code>long</code> containing the desired file
* pointer position.
*
* @exception IndexOutOfBoundsException if <code>pos</code> is smaller
* than the flushed position.
* @exception IOException if any other I/O error occurs.
*/
void seek(long pos) throws IOException;
/**
* Marks a position in the stream to be returned to by a
* subsequent call to <code>reset</code>. Unlike a standard
* <code>InputStream</code>, all <code>ImageInputStream</code>s
* support marking. Additionally, calls to <code>mark</code> and
* <code>reset</code> may be nested arbitrarily.
*
* <p> Unlike the <code>mark</code> methods declared by the
* <code>Reader</code> and <code>InputStream</code> interfaces, no
* <code>readLimit</code> parameter is used. An arbitrary amount
* of data may be read following the call to <code>mark</code>.
*
* <p> The bit position used by the <code>readBits</code> method
* is saved and restored by each pair of calls to
* <code>mark</code> and <code>reset</code>.
*
* <p> Note that it is valid for an <code>ImageReader</code> to call
* <code>flushBefore</code> as part of a read operation.
* Therefore, if an application calls <code>mark</code> prior to
* passing that stream to an <code>ImageReader</code>, the application
* should not assume that the marked position will remain valid after
* the read operation has completed.
*/
void mark();
/**
* Returns the stream pointer to its previous position, including
* the bit offset, at the time of the most recent unmatched call
* to <code>mark</code>.
*
* <p> Calls to <code>reset</code> without a corresponding call
* to <code>mark</code> have no effect.
*
* <p> An <code>IOException</code> will be thrown if the previous
* marked position lies in the discarded portion of the stream.
*
* @exception IOException if an I/O error occurs.
*/
void reset() throws IOException;
/**
* Discards the initial portion of the stream prior to the
* indicated position. Attempting to seek to an offset within the
* flushed portion of the stream will result in an
* <code>IndexOutOfBoundsException</code>.
*
* <p> Calling <code>flushBefore</code> may allow classes
* implementing this interface to free up resources such as memory
* or disk space that are being used to store data from the
* stream.
*
* @param pos a <code>long</code> containing the length of the
* stream prefix that may be flushed.
*
* @exception IndexOutOfBoundsException if <code>pos</code> lies
* in the flushed portion of the stream or past the current stream
* position.
* @exception IOException if an I/O error occurs.
*/
void flushBefore(long pos) throws IOException;
/**
* Discards the initial position of the stream prior to the current
* stream position. Equivalent to
* <code>flushBefore(getStreamPosition())</code>.
*
* @exception IOException if an I/O error occurs.
*/
void flush() throws IOException;
/**
* Returns the earliest position in the stream to which seeking
* may be performed. The returned value will be the maximum of
* all values passed into previous calls to
* <code>flushBefore</code>.
*
* @return the earliest legal position for seeking, as a
* <code>long</code>.
*/
long getFlushedPosition();
/**
* Returns <code>true</code> if this <code>ImageInputStream</code>
* caches data itself in order to allow seeking backwards.
* Applications may consult this in order to decide how frequently,
* or whether, to flush in order to conserve cache resources.
*
* @return <code>true</code> if this <code>ImageInputStream</code>
* caches data.
*
* @see #isCachedMemory
* @see #isCachedFile
*/
boolean isCached();
/**
* Returns <code>true</code> if this <code>ImageInputStream</code>
* caches data itself in order to allow seeking backwards, and
* the cache is kept in main memory. Applications may consult
* this in order to decide how frequently, or whether, to flush
* in order to conserve cache resources.
*
* @return <code>true</code> if this <code>ImageInputStream</code>
* caches data in main memory.
*
* @see #isCached
* @see #isCachedFile
*/
boolean isCachedMemory();
/**
* Returns <code>true</code> if this <code>ImageInputStream</code>
* caches data itself in order to allow seeking backwards, and
* the cache is kept in a temporary file. Applications may consult
* this in order to decide how frequently, or whether, to flush
* in order to conserve cache resources.
*
* @return <code>true</code> if this <code>ImageInputStream</code>
* caches data in a temporary file.
*
* @see #isCached
* @see #isCachedMemory
*/
boolean isCachedFile();
/**
* Closes the stream. Attempts to access a stream that has been
* closed may result in <code>IOException</code>s or incorrect
* behavior. Calling this method may allow classes implementing
* this interface to release resources associated with the stream
* such as memory, disk space, or file descriptors.
*
* @exception IOException if an I/O error occurs.
*/
void close() throws IOException;
}
| apache-2.0 |
tufangorel/hazelcast | hazelcast/src/main/java/com/hazelcast/ringbuffer/impl/RingbufferDataSerializerHook.java | 4323 | /*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.ringbuffer.impl;
import com.hazelcast.internal.serialization.DataSerializerHook;
import com.hazelcast.internal.serialization.impl.FactoryIdHelper;
import com.hazelcast.nio.serialization.DataSerializableFactory;
import com.hazelcast.nio.serialization.IdentifiedDataSerializable;
import com.hazelcast.ringbuffer.impl.operations.AddAllBackupOperation;
import com.hazelcast.ringbuffer.impl.operations.AddAllOperation;
import com.hazelcast.ringbuffer.impl.operations.AddBackupOperation;
import com.hazelcast.ringbuffer.impl.operations.AddOperation;
import com.hazelcast.ringbuffer.impl.operations.GenericOperation;
import com.hazelcast.ringbuffer.impl.operations.MergeBackupOperation;
import com.hazelcast.ringbuffer.impl.operations.MergeOperation;
import com.hazelcast.ringbuffer.impl.operations.ReadManyOperation;
import com.hazelcast.ringbuffer.impl.operations.ReadOneOperation;
import com.hazelcast.ringbuffer.impl.operations.ReplicationOperation;
import static com.hazelcast.internal.serialization.impl.FactoryIdHelper.RINGBUFFER_DS_FACTORY;
import static com.hazelcast.internal.serialization.impl.FactoryIdHelper.RINGBUFFER_DS_FACTORY_ID;
/**
* The {@link DataSerializerHook} for the Ringbuffer.
*/
/**
 * The {@link DataSerializerHook} for the Ringbuffer.
 * <p>
 * Maps each serialization class id declared below to a freshly constructed
 * {@link IdentifiedDataSerializable} instance of the corresponding
 * Ringbuffer operation or container class.
 */
public class RingbufferDataSerializerHook implements DataSerializerHook {

    /** Factory id under which all Ringbuffer serializables are registered. */
    public static final int F_ID = FactoryIdHelper.getFactoryId(RINGBUFFER_DS_FACTORY, RINGBUFFER_DS_FACTORY_ID);

    public static final int GENERIC_OPERATION = 1;
    public static final int ADD_BACKUP_OPERATION = 2;
    public static final int ADD_OPERATION = 3;
    public static final int READ_ONE_OPERATION = 4;
    public static final int REPLICATION_OPERATION = 5;
    public static final int READ_MANY_OPERATION = 6;
    public static final int ADD_ALL_OPERATION = 7;
    public static final int ADD_ALL_BACKUP_OPERATION = 8;
    public static final int READ_RESULT_SET = 9;
    public static final int RINGBUFFER_CONTAINER = 10;
    public static final int MERGE_OPERATION = 11;
    public static final int MERGE_BACKUP_OPERATION = 12;

    @Override
    public int getFactoryId() {
        return F_ID;
    }

    @Override
    public DataSerializableFactory createFactory() {
        return new DataSerializableFactory() {
            @Override
            public IdentifiedDataSerializable create(int classId) {
                // Dispatch on the class id, ordered by id value; an unknown id
                // yields null, exactly like the default branch of a switch.
                if (classId == GENERIC_OPERATION) {
                    return new GenericOperation();
                }
                if (classId == ADD_BACKUP_OPERATION) {
                    return new AddBackupOperation();
                }
                if (classId == ADD_OPERATION) {
                    return new AddOperation();
                }
                if (classId == READ_ONE_OPERATION) {
                    return new ReadOneOperation();
                }
                if (classId == REPLICATION_OPERATION) {
                    return new ReplicationOperation();
                }
                if (classId == READ_MANY_OPERATION) {
                    return new ReadManyOperation();
                }
                if (classId == ADD_ALL_OPERATION) {
                    return new AddAllOperation();
                }
                if (classId == ADD_ALL_BACKUP_OPERATION) {
                    return new AddAllBackupOperation();
                }
                if (classId == READ_RESULT_SET) {
                    return new ReadResultSetImpl();
                }
                if (classId == RINGBUFFER_CONTAINER) {
                    return new RingbufferContainer();
                }
                if (classId == MERGE_OPERATION) {
                    return new MergeOperation();
                }
                if (classId == MERGE_BACKUP_OPERATION) {
                    return new MergeBackupOperation();
                }
                return null;
            }
        };
    }
}
| apache-2.0 |
opennetworkinglab/onos | apps/odtn/api/src/main/java/org/onosproject/odtn/utils/openconfig/OpenConfigConfigOfChannelHandler.java | 1965 | /*
* Copyright 2018-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.odtn.utils.openconfig;
import org.onosproject.yang.gen.v1.openconfigterminaldevice.rev20170708.openconfigterminaldevice.terminallogicalchanneltop.logicalchannels.channel.DefaultConfig;
/**
* Utility class to deal with OPENCONFIG Channel/Config ModelObject & Annotation.
*/
/**
 * Utility class to deal with the OPENCONFIG Channel/Config ModelObject
 * and its annotations.
 */
public final class OpenConfigConfigOfChannelHandler
        extends OpenConfigObjectHandler<DefaultConfig> {

    // YANG node name of this object under its parent channel.
    private static final String OPENCONFIG_NAME = "config";
    // Namespace of the openconfig-terminal-device YANG model.
    private static final String NAME_SPACE = "http://openconfig.net/yang/terminal-device";

    /**
     * OpenConfigConfigOfChannelHandler Constructor.
     *
     * <p>Creates the backing {@code DefaultConfig} model object, registers its
     * resource id under the parent channel's resource-id builder, shares the
     * parent's annotated-node-info list, and finally attaches this handler to
     * the parent via {@code addConfig}.
     *
     * @param parent OpenConfigChannelHandler of parent OPENCONFIG(channel)
     */
    public OpenConfigConfigOfChannelHandler(OpenConfigChannelHandler parent) {
        modelObject = new DefaultConfig();
        setResourceId(OPENCONFIG_NAME, NAME_SPACE, null, parent.getResourceIdBuilder());
        annotatedNodeInfos = parent.getAnnotatedNodeInfoList();
        parent.addConfig(this);
    }

    /**
     * Sets the index value on the underlying config model object.
     *
     * @param index Integer index to be set on the model object
     * @return this handler, for method chaining
     */
    public OpenConfigConfigOfChannelHandler addIndex(Integer index) {
        modelObject.index(index);
        return this;
    }
}
| apache-2.0 |
goodwinnk/intellij-community | platform/platform-impl/src/com/intellij/openapi/keymap/impl/ui/QuickListsUi.java | 5109 | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.keymap.impl.ui;
import com.intellij.openapi.actionSystem.ex.QuickList;
import com.intellij.openapi.actionSystem.ex.QuickListsManager;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.options.ConfigurableUi;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.ui.Splitter;
import com.intellij.ui.DocumentAdapter;
import com.intellij.util.ui.ListItemEditor;
import com.intellij.util.ui.ListModelEditor;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import javax.swing.border.EmptyBorder;
import javax.swing.event.DocumentEvent;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import java.awt.*;
import java.util.List;
/**
 * Settings UI for editing {@link QuickList}s: a list of quick lists on the
 * left and a detail panel ({@link QuickListPanel}) for the selected item on
 * the right, shown via a {@link CardLayout} (empty placeholder vs. editor).
 */
class QuickListsUi implements ConfigurableUi<List<QuickList>> {
  /** Card id shown when no quick list is selected. */
  public static final String EMPTY = "empty";
  /** Card id showing the editor panel for the selected quick list. */
  public static final String PANEL = "panel";

  private final KeymapListener keymapListener;

  /** Adapter telling the generic list editor how to copy and inspect {@link QuickList} items. */
  private final ListItemEditor<QuickList> itemEditor = new ListItemEditor<QuickList>() {
    @NotNull
    @Override
    public Class<QuickList> getItemClass() {
      return QuickList.class;
    }

    @Override
    public QuickList clone(@NotNull QuickList item, boolean forInPlaceEditing) {
      return new QuickList(item.getName(), item.getDescription(), item.getActionIds());
    }

    @Override
    public boolean isEmpty(@NotNull QuickList item) {
      return item.getName().isEmpty() && item.getDescription() == null && item.getActionIds().length == 0;
    }

    @NotNull
    @Override
    public String getName(@NotNull QuickList item) {
      return item.getName();
    }

    @Override
    public boolean isRemovable(@NotNull QuickList item) {
      return QuickListsManager.getInstance().getSchemeManager().isMetadataEditable(item);
    }
  };

  private final ListModelEditor<QuickList> editor = new ListModelEditor<>(itemEditor);
  private final JComponent component;
  private final QuickListPanel itemPanel;
  private final JPanel itemPanelWrapper;

  QuickListsUi() {
    keymapListener = ApplicationManager.getApplication().getMessageBus().syncPublisher(KeymapListener.CHANGE_TOPIC);
    final CardLayout cardLayout = new CardLayout();
    // doesn't make any sense (and in any case scheme manager cannot preserve order)
    editor.disableUpDownActions();
    // ListSelectionListener is a functional interface; use a lambda like the
    // rest of this class. The fields referenced here are assigned later in
    // this constructor, but the lambda only runs on user selection events.
    editor.getList().addListSelectionListener(e -> {
      QuickList item = editor.getSelected();
      if (item == null) {
        cardLayout.show(itemPanelWrapper, EMPTY);
        itemPanel.setItem(null);
      }
      else {
        cardLayout.show(itemPanelWrapper, PANEL);
        itemPanel.setItem(editor.getMutable(item));
      }
    });

    itemPanel = new QuickListPanel(editor.getModel());
    // Keep the list cell text in sync while the user types the name.
    itemPanel.myName.getDocument().addDocumentListener(new DocumentAdapter() {
      @Override
      protected void textChanged(@NotNull DocumentEvent e) {
        QuickList item = itemPanel.item;
        if (item != null) {
          String name = itemPanel.myName.getText();
          boolean changed = !item.getName().equals(name);
          item.setName(name);
          if (changed) {
            editor.getList().repaint();
          }
        }
      }
    });

    itemPanelWrapper = new JPanel(cardLayout);

    JLabel descLabel =
      new JLabel("<html>Quick Lists allow you to define commonly used groups of actions (for example, refactoring or VCS actions)" +
                 " and to assign keyboard shortcuts to such groups.</html>");
    descLabel.setBorder(new EmptyBorder(0, 25, 0, 25));

    itemPanelWrapper.add(descLabel, EMPTY);
    itemPanelWrapper.add(itemPanel.getPanel(), PANEL);

    Splitter splitter = new Splitter(false, 0.3f);
    splitter.setFirstComponent(editor.createComponent());
    splitter.setSecondComponent(itemPanelWrapper);
    component = splitter;
  }

  @Override
  public void reset(@NotNull List<QuickList> settings) {
    editor.reset(settings);
  }

  @Override
  public boolean isModified(@NotNull List<QuickList> settings) {
    itemPanel.apply();
    return editor.isModified();
  }

  @Override
  public void apply(@NotNull List<QuickList> settings) throws ConfigurationException {
    itemPanel.apply();
    editor.ensureNonEmptyNames("Quick list should have non empty name");
    editor.processModifiedItems((newItem, oldItem) -> {
      if (!oldItem.getName().equals(newItem.getName())) {
        keymapListener.quickListRenamed(oldItem, newItem);
      }
      return true;
    });

    if (isModified(settings)) {
      // List is already imported at the top of the file; no need for the
      // fully-qualified java.util.List used previously.
      List<QuickList> result = editor.apply();
      keymapListener.processCurrentKeymapChanged(result.toArray(new QuickList[0]));
      QuickListsManager.getInstance().setQuickLists(result);
    }
  }

  @NotNull
  @Override
  public JComponent getComponent() {
    return component;
  }
}
| apache-2.0 |
robzor92/hops | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/SysInfoWindows.java | 6891 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.util;
import java.io.IOException;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;
/**
* Plugin to calculate resource information on Windows systems.
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class SysInfoWindows extends SysInfo {

  private static final Log LOG = LogFactory.getLog(SysInfoWindows.class);

  // Cached metrics, refreshed at most once per REFRESH_INTERVAL_MS by
  // refreshIfNeeded(). A value of -1 means "unknown / not yet parsed".
  private long vmemSize;
  private long memSize;
  private long vmemAvailable;
  private long memAvailable;
  private int numProcessors;
  private long cpuFrequencyKhz;
  private long cumulativeCpuTimeMs;
  private float cpuUsage;
  private long storageBytesRead;
  private long storageBytesWritten;
  private long netBytesRead;
  private long netBytesWritten;
  // Monotonic timestamp (ms) of the last successful refresh attempt.
  private long lastRefreshTime;
  // Minimum interval between two invocations of the winutils shell command.
  static final int REFRESH_INTERVAL_MS = 1000;

  public SysInfoWindows() {
    lastRefreshTime = 0;
    reset();
  }

  // Overridable for tests so refresh timing can be controlled.
  @VisibleForTesting
  long now() {
    return Time.monotonicNow();
  }

  // Marks every cached metric as unknown (-1). Called before each re-parse so
  // a failed parse leaves no stale values behind.
  void reset() {
    vmemSize = -1;
    memSize = -1;
    vmemAvailable = -1;
    memAvailable = -1;
    numProcessors = -1;
    cpuFrequencyKhz = -1;
    cumulativeCpuTimeMs = -1;
    cpuUsage = -1;
    storageBytesRead = -1;
    storageBytesWritten = -1;
    netBytesRead = -1;
    netBytesWritten = -1;
  }

  // Runs "winutils systeminfo" and returns its raw output, or null if the
  // command could not be executed. Package-private for testing.
  String getSystemInfoInfoFromShell() {
    try {
      ShellCommandExecutor shellExecutor = new ShellCommandExecutor(
          new String[] {Shell.getWinUtilsFile().getCanonicalPath(),
              "systeminfo" });
      shellExecutor.execute();
      return shellExecutor.getOutput();
    } catch (IOException e) {
      LOG.error(StringUtils.stringifyException(e));
    }
    return null;
  }

  // Re-reads all metrics from the shell if REFRESH_INTERVAL_MS has elapsed.
  // The previous cumulative CPU time is snapshotted before reset() so the
  // CPU usage over the elapsed interval can be derived from the delta.
  // Expected output line: 11 comma-separated numeric fields terminated by
  // "\r\n"; the field order below must match winutils' systeminfo output.
  void refreshIfNeeded() {
    long now = now();
    if (now - lastRefreshTime > REFRESH_INTERVAL_MS) {
      long refreshInterval = now - lastRefreshTime;
      lastRefreshTime = now;
      long lastCumCpuTimeMs = cumulativeCpuTimeMs;
      reset();
      String sysInfoStr = getSystemInfoInfoFromShell();
      if (sysInfoStr != null) {
        final int sysInfoSplitCount = 11;
        int index = sysInfoStr.indexOf("\r\n");
        if (index >= 0) {
          String[] sysInfo = sysInfoStr.substring(0, index).split(",");
          if (sysInfo.length == sysInfoSplitCount) {
            try {
              vmemSize = Long.parseLong(sysInfo[0]);
              memSize = Long.parseLong(sysInfo[1]);
              vmemAvailable = Long.parseLong(sysInfo[2]);
              memAvailable = Long.parseLong(sysInfo[3]);
              numProcessors = Integer.parseInt(sysInfo[4]);
              cpuFrequencyKhz = Long.parseLong(sysInfo[5]);
              cumulativeCpuTimeMs = Long.parseLong(sysInfo[6]);
              storageBytesRead = Long.parseLong(sysInfo[7]);
              storageBytesWritten = Long.parseLong(sysInfo[8]);
              netBytesRead = Long.parseLong(sysInfo[9]);
              netBytesWritten = Long.parseLong(sysInfo[10]);
              if (lastCumCpuTimeMs != -1) {
                /**
                 * This number will be the aggregated usage across all cores in
                 * [0.0, 100.0]. For example, it will be 400.0 if there are 8
                 * cores and each of them is running at 50% utilization.
                 */
                cpuUsage = (cumulativeCpuTimeMs - lastCumCpuTimeMs)
                    * 100F / refreshInterval;
              }
            } catch (NumberFormatException nfe) {
              LOG.warn("Error parsing sysInfo", nfe);
            }
          } else {
            LOG.warn("Expected split length of sysInfo to be "
                + sysInfoSplitCount + ". Got " + sysInfo.length);
          }
        } else {
          LOG.warn("Wrong output from sysInfo: " + sysInfoStr);
        }
      }
    }
  }

  /** {@inheritDoc} */
  @Override
  public long getVirtualMemorySize() {
    refreshIfNeeded();
    return vmemSize;
  }

  /** {@inheritDoc} */
  @Override
  public long getPhysicalMemorySize() {
    refreshIfNeeded();
    return memSize;
  }

  /** {@inheritDoc} */
  @Override
  public long getAvailableVirtualMemorySize() {
    refreshIfNeeded();
    return vmemAvailable;
  }

  /** {@inheritDoc} */
  @Override
  public long getAvailablePhysicalMemorySize() {
    refreshIfNeeded();
    return memAvailable;
  }

  /** {@inheritDoc} */
  @Override
  public int getNumProcessors() {
    refreshIfNeeded();
    return numProcessors;
  }

  /** {@inheritDoc} */
  @Override
  public int getNumCores() {
    return getNumProcessors();
  }

  /** {@inheritDoc} */
  @Override
  public long getCpuFrequency() {
    refreshIfNeeded();
    return cpuFrequencyKhz;
  }

  /** {@inheritDoc} */
  @Override
  public long getCumulativeCpuTime() {
    refreshIfNeeded();
    return cumulativeCpuTimeMs;
  }

  /** {@inheritDoc} */
  @Override
  public float getCpuUsagePercentage() {
    refreshIfNeeded();
    // cpuUsage is aggregated over all cores; normalize to a per-core
    // percentage in [0.0, 100.0] unless it is still unknown (-1).
    float ret = cpuUsage;
    if (ret != -1) {
      ret = ret / numProcessors;
    }
    return ret;
  }

  /** {@inheritDoc} */
  @Override
  public float getNumVCoresUsed() {
    refreshIfNeeded();
    // Convert the aggregated percentage into a number of virtual cores.
    float ret = cpuUsage;
    if (ret != -1) {
      ret = ret / 100F;
    }
    return ret;
  }

  /** {@inheritDoc} */
  @Override
  public long getNetworkBytesRead() {
    refreshIfNeeded();
    return netBytesRead;
  }

  /** {@inheritDoc} */
  @Override
  public long getNetworkBytesWritten() {
    refreshIfNeeded();
    return netBytesWritten;
  }

  @Override
  public long getStorageBytesRead() {
    refreshIfNeeded();
    return storageBytesRead;
  }

  @Override
  public long getStorageBytesWritten() {
    refreshIfNeeded();
    return storageBytesWritten;
  }

  /** {@inheritDoc} */
  public float getCpuUsage() {
    refreshIfNeeded();
    return cpuUsage;
  }

  public int getNumGPUs() {
    //No support for GPUs under windows!!!!
    return 0;
  }
}
| apache-2.0 |
MobileCloudNetworking/icnaas | mcn-ccn-router/ccnx-0.8.2/javasrc/src/main/org/ccnx/ccn/io/content/ConfigSlice.java | 7514 | /*
* Part of the CCNx Java Library.
*
* Copyright (C) 2011-2013 Palo Alto Research Center, Inc.
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License version 2.1
* as published by the Free Software Foundation.
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details. You should have received
* a copy of the GNU Lesser General Public License along with this library;
* if not, write to the Free Software Foundation, Inc., 51 Franklin Street,
* Fifth Floor, Boston, MA 02110-1301 USA.
*/
package org.ccnx.ccn.io.content;
import static org.ccnx.ccn.impl.encoding.CCNProtocolDTags.ConfigSlice;
import static org.ccnx.ccn.impl.encoding.CCNProtocolDTags.ConfigSliceList;
import static org.ccnx.ccn.impl.encoding.CCNProtocolDTags.ConfigSliceOp;
import static org.ccnx.ccn.impl.encoding.CCNProtocolDTags.SyncVersion;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedList;
import org.ccnx.ccn.CCNHandle;
import org.ccnx.ccn.config.SystemConfiguration;
import org.ccnx.ccn.impl.encoding.CCNProtocolDTags;
import org.ccnx.ccn.impl.encoding.GenericXMLEncodable;
import org.ccnx.ccn.impl.encoding.XMLDecoder;
import org.ccnx.ccn.impl.encoding.XMLEncoder;
import org.ccnx.ccn.impl.security.crypto.CCNDigestHelper;
import org.ccnx.ccn.impl.support.Log;
import org.ccnx.ccn.profiles.sync.Sync;
import org.ccnx.ccn.protocol.ContentName;
/**
 * An encodable/decodable representation of a CCN Sync configuration slice:
 * a protocol version, a topology prefix, a naming prefix, and an optional
 * list of filters. Identity (equals/hashCode/compareTo) is based on the
 * digest of the slice's wire encoding.
 */
public final class ConfigSlice extends GenericXMLEncodable implements Comparable<ConfigSlice> {

    public int version = Sync.SLICE_VERSION;
    public ContentName topo;
    public ContentName prefix;
    private LinkedList<Filter> filters = new LinkedList<Filter>();
    // Lazily computed digest of this slice's encoding; see getHash().
    private byte[] _hash = null;

    /**
     * Config slice lists require a ConfigSliceOp written before the
     * ContentName, although it does nothing. This class
     * encodes and decodes the preceding ConfigSliceOp and the
     * ContentName together, representing a single filter element.
     */
    @SuppressWarnings("serial")
    public static class Filter extends ContentName {
        public Filter() {
        }

        public Filter(ContentName cn) {
            super(cn);
        }

        public Filter(byte[][] arg) {
            super(arg);
        }

        @Override
        public void decode(XMLDecoder decoder) throws ContentDecodingException {
            decoder.readIntegerElement(ConfigSliceOp);
            super.decode(decoder);
        }

        @Override
        public void encode(XMLEncoder encoder) throws ContentEncodingException {
            encoder.writeElement(ConfigSliceOp, 0);
            super.encode(encoder);
        }
    }

    public ConfigSlice() {}

    /**
     * Creates a slice from its parts.
     *
     * @param topo topology prefix
     * @param prefix naming prefix covered by this slice
     * @param new_filters optional filters; may be null for none
     */
    public ConfigSlice(ContentName topo, ContentName prefix, Collection<Filter> new_filters) {
        this.topo = topo;
        this.prefix = prefix;
        if (new_filters != null)
            filters.addAll(new_filters);
    }

    /**
     * Check that a sync ConfigSlice exists in the local repository, and if not create one.
     *
     * @param topo from ConfigSlice
     * @param prefix from ConfigSlice
     * @param filters from ConfigSlice
     * @param handle CCN handle used to talk to the repository
     * @return the (existing or newly written) slice
     * @throws ContentDecodingException if existing slice data cannot be decoded
     * @throws IOException on repository I/O failure
     */
    public static ConfigSlice checkAndCreate(ContentName topo, ContentName prefix, Collection<Filter> filters, CCNHandle handle) throws ContentDecodingException, IOException {
        ConfigSlice slice = new ConfigSlice(topo, prefix, filters);
        ConfigSliceObject csno = new ConfigSliceObject(slice, handle);
        boolean updated = csno.update(SystemConfiguration.SHORT_TIMEOUT);
        if (updated)
            Log.fine(Log.FAC_SYNC, "found this slice in my repo! {0}", csno.getVersionedName());
        else
            Log.fine(Log.FAC_SYNC, "didn't find a slice in my repo.");
        // Save only when the slice is missing, unavailable, or marked GONE.
        if (!updated || (updated && (!csno.available() || csno.isGone()))) {
            Log.fine(Log.FAC_SYNC, "need to save my data to create the slice for the repo!");
            csno.setData(slice);
            csno.save();
        } else {
            Log.fine(Log.FAC_SYNC, "don't need to do anything... returning the existing slice");
        }
        csno.close();
        return slice;
    }

    /**
     * Instance variant of {@link #checkAndCreate(ContentName, ContentName, Collection, CCNHandle)}:
     * ensures this slice exists in the repository, writing it if absent or GONE.
     *
     * @param handle CCN handle used to talk to the repository
     * @throws ContentDecodingException if existing slice data cannot be decoded
     * @throws ContentEncodingException if this slice cannot be encoded
     * @throws IOException on repository I/O failure
     */
    public void checkAndCreate(CCNHandle handle) throws ContentDecodingException, ContentEncodingException, IOException {
        ConfigSliceObject existingSlice;
        try {
            existingSlice = new ConfigSliceObject(this, handle);
            boolean updated = existingSlice.update(SystemConfiguration.SHORT_TIMEOUT);
            if (!updated || (updated && (!existingSlice.available() || existingSlice.isGone()))) {
                existingSlice.setData(this);
                existingSlice.save();
            }
        } catch (ContentDecodingException e) {
            Log.warning(Log.FAC_REPO, "ContentDecodingException: Unable to read in existing slice data from repository.");
            throw e;
        } catch (IOException e) {
            Log.warning(Log.FAC_REPO, "IOException: error when attempting to retrieve existing slice");
            throw e;
        }
        existingSlice.close();
    }

    /**
     * Marks this slice as GONE in the repository.
     *
     * @param handle CCN handle used to talk to the repository
     * @return true if the GONE marker was saved
     * @throws IOException if the existing slice cannot be read or deleted
     */
    public boolean deleteSlice(CCNHandle handle) throws IOException {
        ConfigSliceObject existingSlice;
        try {
            existingSlice = new ConfigSliceObject(this.getHash(), handle);
            return existingSlice.saveAsGone();
        } catch (ContentDecodingException e) {
            Log.warning(Log.FAC_REPO, "ContentDecodingException: Unable to read in existing slice data from repository.");
            // Preserve the original exception as the cause instead of
            // flattening it to just its message.
            throw new IOException("Unable to delete slice from repository: " + e.getMessage(), e);
        } catch (IOException e) {
            Log.warning(Log.FAC_REPO, "IOException: error when attempting to retrieve existing slice before deletion");
            throw new IOException("Unable to delete slice from repository: " + e.getMessage(), e);
        }
    }

    /**
     * Returns the digest of this slice's wire encoding, computing and caching
     * it on first use. This digest is the slice's identity.
     */
    public byte[] getHash() {
        if (null == _hash) {
            try {
                _hash = CCNDigestHelper.digest(encode());
            } catch (ContentEncodingException e) {
                // should never happen since we're encoding our own data
                throw new RuntimeException(e);
            }
        }
        return _hash;
    }

    @Override
    public void decode(XMLDecoder decoder) throws ContentDecodingException {
        decoder.readStartElement(getElementLabel());
        version = decoder.readIntegerElement(SyncVersion);
        topo = new ContentName();
        topo.decode(decoder);
        prefix = new ContentName();
        prefix.decode(decoder);
        decoder.readStartElement(ConfigSliceList);
        while (decoder.peekStartElement(CCNProtocolDTags.ConfigSliceOp)) {
            Filter f = new Filter();
            f.decode(decoder);
            filters.add(f);
        }
        decoder.readEndElement();
        decoder.readEndElement();
    }

    @Override
    public void encode(XMLEncoder encoder) throws ContentEncodingException {
        encoder.writeStartElement(getElementLabel());
        encoder.writeElement(SyncVersion, version);
        topo.encode(encoder);
        prefix.encode(encoder);
        encoder.writeStartElement(ConfigSliceList);
        for (Filter f : filters)
            f.encode(encoder);
        encoder.writeEndElement();
        encoder.writeEndElement();
    }

    @Override
    public long getElementLabel() {
        return ConfigSlice;
    }

    @Override
    public boolean validate() {
        return true;
    }

    @Override
    public int hashCode() {
        return Arrays.hashCode(getHash());
    }

    @Override
    public boolean equals(Object obj) {
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof ConfigSlice))
            return false;
        ConfigSlice otherSlice = (ConfigSlice) obj;
        return Arrays.equals(this.getHash(), otherSlice.getHash());
    }

    @Override
    public int compareTo(ConfigSlice o) {
        // Integer.compare avoids the overflow of subtracting two int hash
        // codes, which could previously return a wrong sign and break the
        // Comparable contract for slices with far-apart hash codes.
        return Integer.compare(Arrays.hashCode(getHash()), Arrays.hashCode(o.getHash()));
    }
}
| apache-2.0 |
igniterealtime/Spark | plugins/fastpath/src/main/java/org/jivesoftware/fastpath/workspace/invite/InvitationManager.java | 6161 | /**
* Copyright (C) 2004-2011 Jive Software. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.fastpath.workspace.invite;
import org.jivesoftware.fastpath.FastpathPlugin;
import org.jivesoftware.fastpath.FpRes;
import org.jivesoftware.smack.SmackException;
import org.jivesoftware.smack.XMPPException;
import org.jivesoftware.smack.util.StringUtils;
import org.jivesoftware.smackx.workgroup.packet.RoomInvitation;
import org.jivesoftware.smackx.workgroup.packet.RoomTransfer;
import org.jivesoftware.spark.ChatManager;
import org.jivesoftware.spark.SparkManager;
import org.jivesoftware.spark.ui.ChatRoom;
import org.jivesoftware.spark.util.log.Log;
import org.jxmpp.jid.Jid;
/**
 * Utility methods for inviting a user, queue or workgroup into a Fastpath
 * chat room, or transferring the room to them. All three public methods share
 * the same flow and differ only in the invitation/transfer type sent.
 */
public class InvitationManager {

    private InvitationManager() {
        // Utility class; not instantiable.
    }

    /**
     * Invite a user to a chat room, or transfer the room to them.
     *
     * @param chatRoom the <code>ChatRoom</code> to invite or transfer.
     * @param workgroup the workgroup name (kept for API compatibility; not used).
     * @param sessionID the sessionID of this Fastpath session.
     * @param jid the jid of the room.
     * @param messageText the message to send to the user.
     * @param transfer true if this is a transfer.
     */
    public static void transferOrInviteUser(ChatRoom chatRoom, String workgroup, String sessionID, final Jid jid, String messageText, final boolean transfer) {
        transferOrInvite(chatRoom, sessionID, jid, messageText, transfer,
            RoomInvitation.Type.user, RoomTransfer.Type.user);
    }

    /**
     * Invite a queue to a chat room, or transfer the room to it.
     *
     * @param chatRoom the <code>ChatRoom</code> to invite or transfer.
     * @param workgroup the workgroup name (kept for API compatibility; not used).
     * @param sessionID the sessionID of this Fastpath session.
     * @param jid the jid of the room.
     * @param messageText the message to send to the user.
     * @param transfer true if this is a transfer.
     */
    public static void transferOrInviteToQueue(ChatRoom chatRoom, String workgroup, String sessionID, final Jid jid, String messageText, final boolean transfer) {
        transferOrInvite(chatRoom, sessionID, jid, messageText, transfer,
            RoomInvitation.Type.queue, RoomTransfer.Type.queue);
    }

    /**
     * Invite a workgroup to a chat room, or transfer the room to it.
     *
     * @param chatRoom the <code>ChatRoom</code> to invite or transfer.
     * @param workgroup the workgroup name (kept for API compatibility; not used).
     * @param sessionID the sessionID of this Fastpath session.
     * @param jid the jid of the room.
     * @param messageText the message to send to the user.
     * @param transfer true if this is a transfer.
     */
    public static void transferOrInviteToWorkgroup(ChatRoom chatRoom, String workgroup, String sessionID, final Jid jid, String messageText, final boolean transfer) {
        transferOrInvite(chatRoom, sessionID, jid, messageText, transfer,
            RoomInvitation.Type.workgroup, RoomTransfer.Type.workgroup);
    }

    /**
     * Shared implementation: sends the invitation or transfer via the agent
     * session and posts a notification message into the chat room transcript.
     * Extracted from three previously duplicated method bodies.
     */
    private static void transferOrInvite(ChatRoom chatRoom, String sessionID, Jid jid, String messageText,
                                         boolean transfer, RoomInvitation.Type inviteType, RoomTransfer.Type transferType) {
        String msg = messageText != null ? StringUtils.escapeForXml(messageText).toString()
            : FpRes.getString("message.please.join.me.in.conference");
        try {
            if (!transfer) {
                FastpathPlugin.getAgentSession().sendRoomInvitation(inviteType, jid, sessionID, msg);
            }
            else {
                FastpathPlugin.getAgentSession().sendRoomTransfer(transferType, jid.toString(), sessionID, msg);
            }
        }
        catch (XMPPException | SmackException | InterruptedException e) {
            if (e instanceof InterruptedException) {
                // Restore the interrupt status rather than swallowing it.
                Thread.currentThread().interrupt();
            }
            Log.error(e);
        }

        String username = SparkManager.getUserManager().getUserNicknameFromJID(jid.asBareJid());
        String notification = transfer
            ? FpRes.getString("message.waiting.for.user", username)
            : FpRes.getString("message.user.has.been.invited", username);
        chatRoom.getTranscriptWindow().insertNotificationMessage(notification, ChatManager.NOTIFICATION_COLOR);
    }
}
| apache-2.0 |
tseen/Federated-HDFS | tseenliu/FedHDFS-hadoop-src/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacityScheduler.java | 36388 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.net.NetworkTopology;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.yarn.LocalConfigurationProvider;
import org.apache.hadoop.yarn.api.ApplicationMasterProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.QueueInfo;
import org.apache.hadoop.yarn.api.records.QueueUserACLInfo;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.AsyncDispatcher;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.server.resourcemanager.Application;
import org.apache.hadoop.yarn.server.resourcemanager.MockAM;
import org.apache.hadoop.yarn.server.resourcemanager.MockNM;
import org.apache.hadoop.yarn.server.resourcemanager.MockNodes;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.RMContextImpl;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.Task;
import org.apache.hadoop.yarn.server.resourcemanager.TestAMAuthorization.MockRMWithAMS;
import org.apache.hadoop.yarn.server.resourcemanager.TestAMAuthorization.MyContainerManager;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerState;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.AbstractYarnScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplication;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerApplicationAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.TestSchedulerUtils;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerApp;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerNode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeRemovedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.security.ClientToAMTokenSecretManagerInRM;
import org.apache.hadoop.yarn.server.resourcemanager.security.NMTokenSecretManagerInRM;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.resource.Resources;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
/**
 * Unit tests for {@link CapacityScheduler}: configuration validation, queue
 * refresh and parsing, node (re)registration, blacklisting, application
 * lifecycle, non-blocking allocate, and resource-request recovery after
 * container preemption.
 */
public class TestCapacityScheduler {
  private static final Log LOG = LogFactory.getLog(TestCapacityScheduler.class);
  // Memory sizes below are expressed in MB; GB is the MB-per-GB multiplier.
  private final int GB = 1024;
  // Queue paths for the two-level test hierarchy: root.{a,b}, a.{a1,a2}, b.{b1,b2,b3}.
  private static final String A = CapacitySchedulerConfiguration.ROOT + ".a";
  private static final String B = CapacitySchedulerConfiguration.ROOT + ".b";
  private static final String A1 = A + ".a1";
  private static final String A2 = A + ".a2";
  private static final String B1 = B + ".b1";
  private static final String B2 = B + ".b2";
  private static final String B3 = B + ".b3";
  // Queue capacities in percent. Deliberately not final:
  // testRefreshQueuesWithNewQueue temporarily carves B4's capacity out of
  // B3_CAPACITY and restores it in a finally block.
  private static float A_CAPACITY = 10.5f;
  private static float B_CAPACITY = 89.5f;
  private static float A1_CAPACITY = 30;
  private static float A2_CAPACITY = 70;
  private static float B1_CAPACITY = 79.2f;
  private static float B2_CAPACITY = 0.8f;
  private static float B3_CAPACITY = 20;
  private ResourceManager resourceManager = null;
  private RMContext mockContext;

  /**
   * Builds a ResourceManager wired to a CapacityScheduler with the test queue
   * layout, rolls the token master keys, starts the dispatcher, and prepares a
   * mocked RMContext whose configuration provider reads local configuration.
   */
  @Before
  public void setUp() throws Exception {
    resourceManager = new ResourceManager();
    CapacitySchedulerConfiguration csConf
        = new CapacitySchedulerConfiguration();
    setupQueueConfiguration(csConf);
    YarnConfiguration conf = new YarnConfiguration(csConf);
    conf.setClass(YarnConfiguration.RM_SCHEDULER,
        CapacityScheduler.class, ResourceScheduler.class);
    resourceManager.init(conf);
    // Secret managers need an initial master key before tokens can be issued.
    resourceManager.getRMContext().getContainerTokenSecretManager().rollMasterKey();
    resourceManager.getRMContext().getNMTokenSecretManager().rollMasterKey();
    ((AsyncDispatcher)resourceManager.getRMContext().getDispatcher()).start();
    mockContext = mock(RMContext.class);
    when(mockContext.getConfigurationProvider()).thenReturn(
        new LocalConfigurationProvider());
  }

  /** Stops the ResourceManager built in {@link #setUp()}, if any. */
  @After
  public void tearDown() throws Exception {
    if (resourceManager != null) {
      resourceManager.stop();
    }
  }

  /**
   * reinitialize() must reject configurations where the minimum allocation
   * (memory or vcores) exceeds the corresponding maximum allocation.
   */
  @Test (timeout = 30000)
  public void testConfValidation() throws Exception {
    ResourceScheduler scheduler = new CapacityScheduler();
    scheduler.setRMContext(resourceManager.getRMContext());
    Configuration conf = new YarnConfiguration();
    conf.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 2048);
    conf.setInt(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB, 1024);
    try {
      scheduler.reinitialize(conf, mockContext);
      fail("Exception is expected because the min memory allocation is" +
        " larger than the max memory allocation.");
    } catch (YarnRuntimeException e) {
      // Exception is expected.
      assertTrue("The thrown exception is not the expected one.",
        e.getMessage().startsWith(
          "Invalid resource scheduler memory"));
    }
    conf = new YarnConfiguration();
    conf.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES, 2);
    conf.setInt(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES, 1);
    try {
      scheduler.reinitialize(conf, mockContext);
      fail("Exception is expected because the min vcores allocation is" +
        " larger than the max vcores allocation.");
    } catch (YarnRuntimeException e) {
      // Exception is expected.
      assertTrue("The thrown exception is not the expected one.",
        e.getMessage().startsWith(
          "Invalid resource scheduler vcores"));
    }
  }

  /**
   * Registers a test NodeManager with the ResourceManager and delivers the
   * corresponding NODE_ADDED event to the scheduler.
   *
   * @return the registered test NodeManager wrapper
   */
  private org.apache.hadoop.yarn.server.resourcemanager.NodeManager
      registerNode(String hostName, int containerManagerPort, int httpPort,
          String rackName, Resource capability)
          throws IOException, YarnException {
    org.apache.hadoop.yarn.server.resourcemanager.NodeManager nm =
        new org.apache.hadoop.yarn.server.resourcemanager.NodeManager(
            hostName, containerManagerPort, httpPort, rackName, capability,
            resourceManager);
    NodeAddedSchedulerEvent nodeAddEvent1 =
        new NodeAddedSchedulerEvent(resourceManager.getRMContext()
            .getRMNodes().get(nm.getNodeId()));
    resourceManager.getResourceScheduler().handle(nodeAddEvent1);
    return nm;
  }

  /**
   * End-to-end scheduling walk-through: two nodes (4GB and 2GB), two apps in
   * queues a1 and b2, tasks allocated across heartbeats; verifies per-app and
   * per-node resource usage after each scheduling round.
   */
  @Test
  public void testCapacityScheduler() throws Exception {
    LOG.info("--- START: testCapacityScheduler ---");
    // Register node1
    String host_0 = "host_0";
    org.apache.hadoop.yarn.server.resourcemanager.NodeManager nm_0 =
        registerNode(host_0, 1234, 2345, NetworkTopology.DEFAULT_RACK,
            Resources.createResource(4 * GB, 1));
    // Register node2
    String host_1 = "host_1";
    org.apache.hadoop.yarn.server.resourcemanager.NodeManager nm_1 =
        registerNode(host_1, 1234, 2345, NetworkTopology.DEFAULT_RACK,
            Resources.createResource(2 * GB, 1));
    // ResourceRequest priorities
    Priority priority_0 =
        org.apache.hadoop.yarn.server.resourcemanager.resource.Priority.create(0);
    Priority priority_1 =
        org.apache.hadoop.yarn.server.resourcemanager.resource.Priority.create(1);
    // Submit an application
    Application application_0 = new Application("user_0", "a1", resourceManager);
    application_0.submit();
    application_0.addNodeManager(host_0, 1234, nm_0);
    application_0.addNodeManager(host_1, 1234, nm_1);
    Resource capability_0_0 = Resources.createResource(1 * GB, 1);
    application_0.addResourceRequestSpec(priority_1, capability_0_0);
    Resource capability_0_1 = Resources.createResource(2 * GB, 1);
    application_0.addResourceRequestSpec(priority_0, capability_0_1);
    Task task_0_0 = new Task(application_0, priority_1,
        new String[] {host_0, host_1});
    application_0.addTask(task_0_0);
    // Submit another application
    Application application_1 = new Application("user_1", "b2", resourceManager);
    application_1.submit();
    application_1.addNodeManager(host_0, 1234, nm_0);
    application_1.addNodeManager(host_1, 1234, nm_1);
    Resource capability_1_0 = Resources.createResource(3 * GB, 1);
    application_1.addResourceRequestSpec(priority_1, capability_1_0);
    Resource capability_1_1 = Resources.createResource(2 * GB, 1);
    application_1.addResourceRequestSpec(priority_0, capability_1_1);
    Task task_1_0 = new Task(application_1, priority_1,
        new String[] {host_0, host_1});
    application_1.addTask(task_1_0);
    // Send resource requests to the scheduler
    application_0.schedule();
    application_1.schedule();
    // Send a heartbeat to kick the tires on the Scheduler
    LOG.info("Kick!");
    // task_0_0 and task_1_0 allocated, used=4G
    nodeUpdate(nm_0);
    // nothing allocated
    nodeUpdate(nm_1);
    // Get allocations from the scheduler
    application_0.schedule();                   // task_0_0
    checkApplicationResourceUsage(1 * GB, application_0);
    application_1.schedule();                   // task_1_0
    checkApplicationResourceUsage(3 * GB, application_1);
    checkNodeResourceUsage(4*GB, nm_0);  // task_0_0 (1G) and task_1_0 (3G)
    checkNodeResourceUsage(0*GB, nm_1);  // no tasks, 2G available
    LOG.info("Adding new tasks...");
    Task task_1_1 = new Task(application_1, priority_0,
        new String[] {ResourceRequest.ANY});
    application_1.addTask(task_1_1);
    application_1.schedule();
    Task task_0_1 = new Task(application_0, priority_0,
        new String[] {host_0, host_1});
    application_0.addTask(task_0_1);
    application_0.schedule();
    // Send a heartbeat to kick the tires on the Scheduler
    LOG.info("Sending hb from " + nm_0.getHostName());
    // nothing new, used=4G
    nodeUpdate(nm_0);
    LOG.info("Sending hb from " + nm_1.getHostName());
    // task_0_1 is prefer as locality, used=2G
    nodeUpdate(nm_1);
    // Get allocations from the scheduler
    LOG.info("Trying to allocate...");
    application_0.schedule();
    checkApplicationResourceUsage(1 * GB, application_0);
    application_1.schedule();
    checkApplicationResourceUsage(5 * GB, application_1);
    nodeUpdate(nm_0);
    nodeUpdate(nm_1);
    checkNodeResourceUsage(4*GB, nm_0);
    checkNodeResourceUsage(2*GB, nm_1);
    LOG.info("--- END: testCapacityScheduler ---");
  }

  /** Delivers a NODE_UPDATE (heartbeat) event for the given node to the scheduler. */
  private void nodeUpdate(
      org.apache.hadoop.yarn.server.resourcemanager.NodeManager nm) {
    RMNode node = resourceManager.getRMContext().getRMNodes().get(nm.getNodeId());
    // Send a heartbeat to kick the tires on the Scheduler
    NodeUpdateSchedulerEvent nodeUpdate = new NodeUpdateSchedulerEvent(node);
    resourceManager.getResourceScheduler().handle(nodeUpdate);
  }

  /**
   * Populates conf with the test queue hierarchy: root.a (10.5%) with a1/a2
   * and root.b (89.5%) with b1/b2/b3, all leaves with user-limit factor 100.
   */
  private void setupQueueConfiguration(CapacitySchedulerConfiguration conf) {
    // Define top-level queues
    conf.setQueues(CapacitySchedulerConfiguration.ROOT, new String[] {"a", "b"});
    conf.setCapacity(A, A_CAPACITY);
    conf.setCapacity(B, B_CAPACITY);
    // Define 2nd-level queues
    conf.setQueues(A, new String[] {"a1", "a2"});
    conf.setCapacity(A1, A1_CAPACITY);
    conf.setUserLimitFactor(A1, 100.0f);
    conf.setCapacity(A2, A2_CAPACITY);
    conf.setUserLimitFactor(A2, 100.0f);
    conf.setQueues(B, new String[] {"b1", "b2", "b3"});
    conf.setCapacity(B1, B1_CAPACITY);
    conf.setUserLimitFactor(B1, 100.0f);
    conf.setCapacity(B2, B2_CAPACITY);
    conf.setUserLimitFactor(B2, 100.0f);
    conf.setCapacity(B3, B3_CAPACITY);
    conf.setUserLimitFactor(B3, 100.0f);
    LOG.info("Setup top-level queues a and b");
  }

  /**
   * Maximum capacity defaults to MAXIMUM_CAPACITY_VALUE, honors an explicit
   * setting, and setting -1 restores the default.
   */
  @Test
  public void testMaximumCapacitySetup() {
    float delta = 0.0000001f;
    CapacitySchedulerConfiguration conf = new CapacitySchedulerConfiguration();
    assertEquals(CapacitySchedulerConfiguration.MAXIMUM_CAPACITY_VALUE,conf.getMaximumCapacity(A),delta);
    conf.setMaximumCapacity(A, 50.0f);
    assertEquals(50.0f, conf.getMaximumCapacity(A),delta);
    conf.setMaximumCapacity(A, -1);
    assertEquals(CapacitySchedulerConfiguration.MAXIMUM_CAPACITY_VALUE,conf.getMaximumCapacity(A),delta);
  }

  /**
   * reinitialize() with changed top-level capacities (a: 10.5->80, b:
   * 89.5->20) must be reflected in the live queue hierarchy.
   */
  @Test
  public void testRefreshQueues() throws Exception {
    CapacityScheduler cs = new CapacityScheduler();
    CapacitySchedulerConfiguration conf = new CapacitySchedulerConfiguration();
    RMContextImpl rmContext =  new RMContextImpl(null, null, null, null, null,
        null, new RMContainerTokenSecretManager(conf),
        new NMTokenSecretManagerInRM(conf),
        new ClientToAMTokenSecretManagerInRM(), null);
    setupQueueConfiguration(conf);
    cs.setConf(new YarnConfiguration());
    cs.setRMContext(resourceManager.getRMContext());
    cs.init(conf);
    cs.start();
    cs.reinitialize(conf, rmContext);
    checkQueueCapacities(cs, A_CAPACITY, B_CAPACITY);
    conf.setCapacity(A, 80f);
    conf.setCapacity(B, 20f);
    cs.reinitialize(conf, mockContext);
    checkQueueCapacities(cs, 80f, 20f);
    cs.stop();
  }

  /**
   * Asserts capacity/absolute-capacity/max-capacity for every queue in the
   * hierarchy, given the top-level a and b capacities in percent. Child
   * absolute capacities are the product of their share and the parent's.
   */
  private void checkQueueCapacities(CapacityScheduler cs,
      float capacityA, float capacityB) {
    CSQueue rootQueue = cs.getRootQueue();
    CSQueue queueA = findQueue(rootQueue, A);
    CSQueue queueB = findQueue(rootQueue, B);
    CSQueue queueA1 = findQueue(queueA, A1);
    CSQueue queueA2 = findQueue(queueA, A2);
    CSQueue queueB1 = findQueue(queueB, B1);
    CSQueue queueB2 = findQueue(queueB, B2);
    CSQueue queueB3 = findQueue(queueB, B3);
    float capA = capacityA / 100.0f;
    float capB = capacityB / 100.0f;
    checkQueueCapacity(queueA, capA, capA, 1.0f, 1.0f);
    checkQueueCapacity(queueB, capB, capB, 1.0f, 1.0f);
    checkQueueCapacity(queueA1, A1_CAPACITY / 100.0f,
        (A1_CAPACITY/100.0f) * capA, 1.0f, 1.0f);
    checkQueueCapacity(queueA2, A2_CAPACITY / 100.0f,
        (A2_CAPACITY/100.0f) * capA, 1.0f, 1.0f);
    checkQueueCapacity(queueB1, B1_CAPACITY / 100.0f,
        (B1_CAPACITY/100.0f) * capB, 1.0f, 1.0f);
    checkQueueCapacity(queueB2, B2_CAPACITY / 100.0f,
        (B2_CAPACITY/100.0f) * capB, 1.0f, 1.0f);
    checkQueueCapacity(queueB3, B3_CAPACITY / 100.0f,
        (B3_CAPACITY/100.0f) * capB, 1.0f, 1.0f);
  }

  /** Asserts the four capacity metrics of a single queue within epsilon. */
  private void checkQueueCapacity(CSQueue q, float expectedCapacity,
      float expectedAbsCapacity, float expectedMaxCapacity,
      float expectedAbsMaxCapacity) {
    final float epsilon = 1e-5f;
    assertEquals("capacity", expectedCapacity, q.getCapacity(), epsilon);
    assertEquals("absolute capacity", expectedAbsCapacity,
        q.getAbsoluteCapacity(), epsilon);
    assertEquals("maximum capacity", expectedMaxCapacity,
        q.getMaximumCapacity(), epsilon);
    assertEquals("absolute maximum capacity", expectedAbsMaxCapacity,
        q.getAbsoluteMaximumCapacity(), epsilon);
  }

  /**
   * Depth-first lookup of a queue by its full path, descending only into
   * children whose path is a prefix of the target. Returns null if not found.
   */
  private CSQueue findQueue(CSQueue root, String queuePath) {
    if (root.getQueuePath().equals(queuePath)) {
      return root;
    }
    List<CSQueue> childQueues = root.getChildQueues();
    if (childQueues != null) {
      for (CSQueue q : childQueues) {
        if (queuePath.startsWith(q.getQueuePath())) {
          CSQueue result = findQueue(q, queuePath);
          if (result != null) {
            return result;
          }
        }
      }
    }
    return null;
  }

  /** Asserts the application's used memory in MB. */
  private void checkApplicationResourceUsage(int expected,
      Application application) {
    Assert.assertEquals(expected, application.getUsedResources().getMemory());
  }

  /** Asserts the node's used memory in MB and triggers its own usage check. */
  private void checkNodeResourceUsage(int expected,
      org.apache.hadoop.yarn.server.resourcemanager.NodeManager node) {
    Assert.assertEquals(expected, node.getUsed().getMemory());
    node.checkResourceUsage();
  }

  /** Test that parseQueue throws an exception when two leaf queues have the
   *  same name
   * @throws IOException
   */
  @Test(expected=IOException.class)
  public void testParseQueue() throws IOException {
    CapacityScheduler cs = new CapacityScheduler();
    cs.setConf(new YarnConfiguration());
    CapacitySchedulerConfiguration conf = new CapacitySchedulerConfiguration();
    setupQueueConfiguration(conf);
    // "b1" already exists under root.b; adding a second leaf named b1 under
    // root.a.a1 must make reinitialize() fail.
    conf.setQueues(CapacitySchedulerConfiguration.ROOT + ".a.a1", new String[] {"b1"} );
    conf.setCapacity(CapacitySchedulerConfiguration.ROOT + ".a.a1.b1", 100.0f);
    conf.setUserLimitFactor(CapacitySchedulerConfiguration.ROOT + ".a.a1.b1", 100.0f);
    cs.reinitialize(conf, new RMContextImpl(null, null, null, null, null,
        null, new RMContainerTokenSecretManager(conf),
        new NMTokenSecretManagerInRM(conf),
        new ClientToAMTokenSecretManagerInRM(), null));
  }

  /**
   * A node that re-registers with less memory (remove + add events) must
   * shrink the scheduler's view of the cluster resource accordingly.
   */
  @Test
  public void testReconnectedNode() throws Exception {
    CapacitySchedulerConfiguration csConf =
        new CapacitySchedulerConfiguration();
    setupQueueConfiguration(csConf);
    CapacityScheduler cs = new CapacityScheduler();
    cs.setConf(new YarnConfiguration());
    cs.setRMContext(resourceManager.getRMContext());
    cs.init(csConf);
    cs.start();
    cs.reinitialize(csConf, new RMContextImpl(null, null, null, null,
        null, null, new RMContainerTokenSecretManager(csConf),
        new NMTokenSecretManagerInRM(csConf),
        new ClientToAMTokenSecretManagerInRM(), null));
    RMNode n1 = MockNodes.newNodeInfo(0, MockNodes.newResource(4 * GB), 1);
    RMNode n2 = MockNodes.newNodeInfo(0, MockNodes.newResource(2 * GB), 2);
    cs.handle(new NodeAddedSchedulerEvent(n1));
    cs.handle(new NodeAddedSchedulerEvent(n2));
    Assert.assertEquals(6 * GB, cs.getClusterResource().getMemory());
    // reconnect n1 with downgraded memory
    n1 = MockNodes.newNodeInfo(0, MockNodes.newResource(2 * GB), 1);
    cs.handle(new NodeRemovedSchedulerEvent(n1));
    cs.handle(new NodeAddedSchedulerEvent(n1));
    Assert.assertEquals(4 * GB, cs.getClusterResource().getMemory());
    cs.stop();
  }

  /**
   * Refreshing with a brand-new leaf queue (root.b.b4) must create the queue
   * and attach it to the correct parent. B3's capacity is lent to B4 for the
   * duration of the test and restored afterwards.
   */
  @Test
  public void testRefreshQueuesWithNewQueue() throws Exception {
    CapacityScheduler cs = new CapacityScheduler();
    CapacitySchedulerConfiguration conf = new CapacitySchedulerConfiguration();
    setupQueueConfiguration(conf);
    cs.setConf(new YarnConfiguration());
    cs.setRMContext(resourceManager.getRMContext());
    cs.init(conf);
    cs.start();
    cs.reinitialize(conf, new RMContextImpl(null, null, null, null, null,
        null, new RMContainerTokenSecretManager(conf),
        new NMTokenSecretManagerInRM(conf),
        new ClientToAMTokenSecretManagerInRM(), null));
    checkQueueCapacities(cs, A_CAPACITY, B_CAPACITY);
    // Add a new queue b4
    String B4 = B + ".b4";
    float B4_CAPACITY = 10;
    B3_CAPACITY -= B4_CAPACITY;
    try {
      conf.setCapacity(A, 80f);
      conf.setCapacity(B, 20f);
      conf.setQueues(B, new String[] {"b1", "b2", "b3", "b4"});
      conf.setCapacity(B1, B1_CAPACITY);
      conf.setCapacity(B2, B2_CAPACITY);
      conf.setCapacity(B3, B3_CAPACITY);
      conf.setCapacity(B4, B4_CAPACITY);
      cs.reinitialize(conf,mockContext);
      checkQueueCapacities(cs, 80f, 20f);
      // Verify parent for B4
      CSQueue rootQueue = cs.getRootQueue();
      CSQueue queueB = findQueue(rootQueue, B);
      CSQueue queueB4 = findQueue(queueB, B4);
      assertEquals(queueB, queueB4.getParent());
    } finally {
      // Restore the shared static so later tests see the original layout.
      B3_CAPACITY += B4_CAPACITY;
      cs.stop();
    }
  }

  /**
   * getQueueInfo("a") reports the queue and its two children, and every queue
   * appears exactly once in the user ACL info.
   */
  @Test
  public void testCapacitySchedulerInfo() throws Exception {
    QueueInfo queueInfo = resourceManager.getResourceScheduler().getQueueInfo("a", true, true);
    Assert.assertEquals(queueInfo.getQueueName(), "a");
    Assert.assertEquals(queueInfo.getChildQueues().size(), 2);
    List<QueueUserACLInfo> userACLInfo = resourceManager.getResourceScheduler().getQueueUserAclInfo();
    Assert.assertNotNull(userACLInfo);
    for (QueueUserACLInfo queueUserACLInfo : userACLInfo) {
      Assert.assertEquals(getQueueCount(userACLInfo, queueUserACLInfo.getQueueName()), 1);
    }
  }

  /** Counts how many ACL-info entries carry the given queue name. */
  private int getQueueCount(List<QueueUserACLInfo> queueInformation, String queueName) {
    int result = 0;
    for (QueueUserACLInfo queueUserACLInfo : queueInformation) {
      if (queueName.equals(queueUserACLInfo.getQueueName())) {
        result++;
      }
    }
    return result;
  }

  /**
   * An allocate() call carrying only blacklist additions/removals (no
   * requests, no releases) must still update the attempt's blacklist.
   */
  @SuppressWarnings("resource")
  @Test
  public void testBlackListNodes() throws Exception {
    Configuration conf = new Configuration();
    conf.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class,
        ResourceScheduler.class);
    MockRM rm = new MockRM(conf);
    rm.start();
    CapacityScheduler cs = (CapacityScheduler) rm.getResourceScheduler();
    String host = "127.0.0.1";
    RMNode node =
        MockNodes.newNodeInfo(0, MockNodes.newResource(4 * GB), 1, host);
    cs.handle(new NodeAddedSchedulerEvent(node));
    ApplicationId appId = BuilderUtils.newApplicationId(100, 1);
    ApplicationAttemptId appAttemptId = BuilderUtils.newApplicationAttemptId(
        appId, 1);
    SchedulerEvent addAppEvent =
        new AppAddedSchedulerEvent(appId, "default", "user");
    cs.handle(addAppEvent);
    SchedulerEvent addAttemptEvent =
        new AppAttemptAddedSchedulerEvent(appAttemptId, false);
    cs.handle(addAttemptEvent);
    // Verify the blacklist can be updated independent of requesting containers
    cs.allocate(appAttemptId, Collections.<ResourceRequest>emptyList(),
        Collections.<ContainerId>emptyList(),
        Collections.singletonList(host), null);
    Assert.assertTrue(cs.getApplicationAttempt(appAttemptId).isBlacklisted(host));
    cs.allocate(appAttemptId, Collections.<ResourceRequest>emptyList(),
        Collections.<ContainerId>emptyList(), null,
        Collections.singletonList(host));
    Assert.assertFalse(cs.getApplicationAttempt(appAttemptId).isBlacklisted(host));
    rm.stop();
  }

  /**
   * The scheduler's application comparator orders attempts by ApplicationId:
   * same cluster timestamp -> by id, otherwise by cluster timestamp.
   */
  @Test (timeout = 5000)
  public void testApplicationComparator()
  {
    CapacityScheduler cs = new CapacityScheduler();
    Comparator<FiCaSchedulerApp> appComparator= cs.getApplicationComparator();
    ApplicationId id1 = ApplicationId.newInstance(1, 1);
    ApplicationId id2 = ApplicationId.newInstance(1, 2);
    ApplicationId id3 = ApplicationId.newInstance(2, 1);
    //same clusterId
    FiCaSchedulerApp app1 = Mockito.mock(FiCaSchedulerApp.class);
    when(app1.getApplicationId()).thenReturn(id1);
    FiCaSchedulerApp app2 = Mockito.mock(FiCaSchedulerApp.class);
    when(app2.getApplicationId()).thenReturn(id2);
    FiCaSchedulerApp app3 = Mockito.mock(FiCaSchedulerApp.class);
    when(app3.getApplicationId()).thenReturn(id3);
    assertTrue(appComparator.compare(app1, app2) < 0);
    //different clusterId
    assertTrue(appComparator.compare(app1, app3) < 0);
    assertTrue(appComparator.compare(app2, app3) < 0);
  }

  /**
   * getAppsInQueue() aggregates correctly at leaf ("a1"), parent ("a") and
   * root level, and returns null for an unknown queue.
   */
  @Test
  public void testGetAppsInQueue() throws Exception {
    Application application_0 = new Application("user_0", "a1", resourceManager);
    application_0.submit();
    Application application_1 = new Application("user_0", "a2", resourceManager);
    application_1.submit();
    Application application_2 = new Application("user_0", "b2", resourceManager);
    application_2.submit();
    ResourceScheduler scheduler = resourceManager.getResourceScheduler();
    List<ApplicationAttemptId> appsInA1 = scheduler.getAppsInQueue("a1");
    assertEquals(1, appsInA1.size());
    List<ApplicationAttemptId> appsInA = scheduler.getAppsInQueue("a");
    assertTrue(appsInA.contains(application_0.getApplicationAttemptId()));
    assertTrue(appsInA.contains(application_1.getApplicationAttemptId()));
    assertEquals(2, appsInA.size());
    List<ApplicationAttemptId> appsInRoot = scheduler.getAppsInQueue("root");
    assertTrue(appsInRoot.contains(application_0.getApplicationAttemptId()));
    assertTrue(appsInRoot.contains(application_1.getApplicationAttemptId()));
    assertTrue(appsInRoot.contains(application_2.getApplicationAttemptId()));
    assertEquals(3, appsInRoot.size());
    Assert.assertNull(scheduler.getAppsInQueue("nonexistentqueue"));
  }

  /**
   * Submitting and removing an app via the shared TestSchedulerUtils helper
   * leaves the app registered against the expected queue ("a1").
   */
  @Test
  public void testAddAndRemoveAppFromCapacityScheduler() throws Exception {
    CapacitySchedulerConfiguration conf = new CapacitySchedulerConfiguration();
    setupQueueConfiguration(conf);
    conf.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class,
      ResourceScheduler.class);
    MockRM rm = new MockRM(conf);
    @SuppressWarnings("unchecked")
    AbstractYarnScheduler<SchedulerApplicationAttempt, SchedulerNode> cs =
        (AbstractYarnScheduler<SchedulerApplicationAttempt, SchedulerNode>) rm
          .getResourceScheduler();
    SchedulerApplication<SchedulerApplicationAttempt> app =
        TestSchedulerUtils.verifyAppAddedAndRemovedFromScheduler(
          cs.getSchedulerApplications(), cs, "a1");
    Assert.assertEquals("a1", app.getQueue().getQueueName());
  }

  /**
   * Smoke test for the async scheduling entry point: registers 100 nodes and
   * drives CapacityScheduler.schedule() directly once per node. Passes if no
   * exception is thrown.
   * NOTE(review): rm is never stopped in this test — confirm whether that is
   * intentional (MockRM cleanup) or a leak.
   */
  @Test
  public void testAsyncScheduling() throws Exception {
    Configuration conf = new Configuration();
    conf.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class,
        ResourceScheduler.class);
    MockRM rm = new MockRM(conf);
    rm.start();
    CapacityScheduler cs = (CapacityScheduler) rm.getResourceScheduler();
    final int NODES = 100;
    // Register nodes
    for (int i=0; i < NODES; ++i) {
      String host = "192.168.1." + i;
      RMNode node =
          MockNodes.newNodeInfo(0, MockNodes.newResource(4 * GB), 1, host);
      cs.handle(new NodeAddedSchedulerEvent(node));
    }
    // Now directly exercise the scheduling loop
    for (int i=0; i < NODES; ++i) {
      CapacityScheduler.schedule(cs);
    }
  }

  /**
   * An AM's allocate() call must not block on the CapacityScheduler monitor:
   * a second thread holds the scheduler lock between two barrier crossings
   * while this thread issues allocate() and still returns.
   */
  @Test(timeout = 30000)
  public void testAllocateDoesNotBlockOnSchedulerLock() throws Exception {
    final YarnConfiguration conf = new YarnConfiguration();
    conf.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class,
        ResourceScheduler.class);
    MyContainerManager containerManager = new MyContainerManager();
    final MockRMWithAMS rm =
        new MockRMWithAMS(conf, containerManager);
    rm.start();
    MockNM nm1 = rm.registerNode("localhost:1234", 5120);
    Map<ApplicationAccessType, String> acls =
        new HashMap<ApplicationAccessType, String>(2);
    acls.put(ApplicationAccessType.VIEW_APP, "*");
    RMApp app = rm.submitApp(1024, "appname", "appuser", acls);
    nm1.nodeHeartbeat(true);
    RMAppAttempt attempt = app.getCurrentAppAttempt();
    ApplicationAttemptId applicationAttemptId = attempt.getAppAttemptId();
    // Poll (up to 10s) until the attempt is LAUNCHED so the AM can register.
    int msecToWait = 10000;
    int msecToSleep = 100;
    while (attempt.getAppAttemptState() != RMAppAttemptState.LAUNCHED
        && msecToWait > 0) {
      LOG.info("Waiting for AppAttempt to reach LAUNCHED state. "
          + "Current state is " + attempt.getAppAttemptState());
      Thread.sleep(msecToSleep);
      msecToWait -= msecToSleep;
    }
    Assert.assertEquals(attempt.getAppAttemptState(),
        RMAppAttemptState.LAUNCHED);
    // Create a client to the RM.
    final YarnRPC rpc = YarnRPC.create(conf);
    UserGroupInformation currentUser =
        UserGroupInformation.createRemoteUser(applicationAttemptId.toString());
    Credentials credentials = containerManager.getContainerCredentials();
    final InetSocketAddress rmBindAddress =
        rm.getApplicationMasterService().getBindAddress();
    Token<? extends TokenIdentifier> amRMToken =
        MockRMWithAMS.setupAndReturnAMRMToken(rmBindAddress,
          credentials.getAllTokens());
    currentUser.addToken(amRMToken);
    ApplicationMasterProtocol client =
        currentUser.doAs(new PrivilegedAction<ApplicationMasterProtocol>() {
          @Override
          public ApplicationMasterProtocol run() {
            return (ApplicationMasterProtocol) rpc.getProxy(
              ApplicationMasterProtocol.class, rmBindAddress, conf);
          }
        });
    RegisterApplicationMasterRequest request =
        RegisterApplicationMasterRequest.newInstance("localhost", 12345, "");
    client.registerApplicationMaster(request);
    // grab the scheduler lock from another thread
    // and verify an allocate call in this thread doesn't block on it
    final CapacityScheduler cs = (CapacityScheduler) rm.getResourceScheduler();
    final CyclicBarrier barrier = new CyclicBarrier(2);
    Thread otherThread = new Thread(new Runnable() {
      @Override
      public void run() {
        synchronized(cs) {
          try {
            barrier.await();
            barrier.await();
          } catch (InterruptedException e) {
            e.printStackTrace();
          } catch (BrokenBarrierException e) {
            e.printStackTrace();
          }
        }
      }
    });
    otherThread.start();
    barrier.await();
    AllocateRequest allocateRequest =
        AllocateRequest.newInstance(0, 0.0f, null, null, null);
    client.allocate(allocateRequest);
    // Releasing the second barrier lets the lock-holding thread exit.
    barrier.await();
    otherThread.join();
    rm.stop();
  }

  /**
   * getNumClusterNodes() tracks NODE_ADDED / NODE_REMOVED events: 0 -> 2 ->
   * 1 -> 2 -> 0.
   */
  @Test
  public void testNumClusterNodes() throws Exception {
    YarnConfiguration conf = new YarnConfiguration();
    CapacityScheduler cs = new CapacityScheduler();
    cs.setConf(conf);
    RMContextImpl rmContext =  new RMContextImpl(null, null, null, null, null,
        null, new RMContainerTokenSecretManager(conf),
        new NMTokenSecretManagerInRM(conf),
        new ClientToAMTokenSecretManagerInRM(), null);
    cs.setRMContext(rmContext);
    CapacitySchedulerConfiguration csConf =
        new CapacitySchedulerConfiguration();
    setupQueueConfiguration(csConf);
    cs.init(csConf);
    cs.start();
    assertEquals(0, cs.getNumClusterNodes());
    RMNode n1 = MockNodes.newNodeInfo(0, MockNodes.newResource(4 * GB), 1);
    RMNode n2 = MockNodes.newNodeInfo(0, MockNodes.newResource(2 * GB), 2);
    cs.handle(new NodeAddedSchedulerEvent(n1));
    cs.handle(new NodeAddedSchedulerEvent(n2));
    assertEquals(2, cs.getNumClusterNodes());
    cs.handle(new NodeRemovedSchedulerEvent(n1));
    assertEquals(1, cs.getNumClusterNodes());
    cs.handle(new NodeAddedSchedulerEvent(n1));
    assertEquals(2, cs.getNumClusterNodes());
    cs.handle(new NodeRemovedSchedulerEvent(n2));
    cs.handle(new NodeRemovedSchedulerEvent(n1));
    assertEquals(0, cs.getNumClusterNodes());
    cs.stop();
  }

  /**
   * After a container is preempted via killContainer(), the node-local,
   * rack-local and ANY resource requests that produced it must be restored in
   * the RM so a replacement container can be allocated.
   * NOTE(review): rm1 is never stopped in this test — confirm whether that is
   * intentional or a leak.
   */
  @Test(timeout = 30000)
  public void testRecoverRequestAfterPreemption() throws Exception {
    Configuration conf = new Configuration();
    conf.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class,
        ResourceScheduler.class);
    MockRM rm1 = new MockRM(conf);
    rm1.start();
    MockNM nm1 = rm1.registerNode("127.0.0.1:1234", 8000);
    RMApp app1 = rm1.submitApp(1024);
    MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
    CapacityScheduler cs = (CapacityScheduler) rm1.getResourceScheduler();
    // request a container.
    am1.allocate("127.0.0.1", 1024, 1, new ArrayList<ContainerId>());
    ContainerId containerId1 = ContainerId.newInstance(am1
        .getApplicationAttemptId(), 2);
    rm1.waitForState(nm1, containerId1, RMContainerState.ALLOCATED);
    RMContainer rmContainer = cs.getRMContainer(containerId1);
    List<ResourceRequest> requests = rmContainer.getResourceRequests();
    FiCaSchedulerApp app = cs.getApplicationAttempt(am1
        .getApplicationAttemptId());
    FiCaSchedulerNode node = cs.getNode(rmContainer.getAllocatedNode());
    for (ResourceRequest request : requests) {
      // Skip the OffRack and RackLocal resource requests.
      if (request.getResourceName().equals(node.getRackName())
          || request.getResourceName().equals(ResourceRequest.ANY)) {
        continue;
      }
      // Already the node local resource request is cleared from RM after
      // allocation.
      Assert.assertNull(app.getResourceRequest(request.getPriority(), request
          .getResourceName()));
    }
    // Call killContainer to preempt the container
    cs.killContainer(rmContainer);
    Assert.assertEquals(3, requests.size());
    for (ResourceRequest request : requests) {
      // Resource request must have added back in RM after preempt event
      // handling.
      Assert.assertEquals(1, app.getResourceRequest(request.getPriority(),
          request.getResourceName()).getNumContainers());
    }
    // New container will be allocated and will move to ALLOCATED state
    ContainerId containerId2 = ContainerId.newInstance(am1
        .getApplicationAttemptId(), 3);
    rm1.waitForState(nm1, containerId2, RMContainerState.ALLOCATED);
    // allocate container
    List<Container> containers = am1.allocate(new ArrayList<ResourceRequest>(),
        new ArrayList<ContainerId>()).getAllocatedContainers();
    // Now with updated ResourceRequest, a container is allocated for AM.
    Assert.assertTrue(containers.size() == 1);
  }
}
| apache-2.0 |
onders86/camel | camel-core/src/test/java/org/apache/camel/issues/AdviceWithPolicyTest.java | 2727 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.issues;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.Processor;
import org.apache.camel.builder.AdviceWithRouteBuilder;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.model.ProcessorDefinition;
import org.apache.camel.model.RouteDefinition;
import org.apache.camel.spi.Policy;
import org.apache.camel.spi.RouteContext;
import org.junit.Test;
/**
* @version
*/
public class AdviceWithPolicyTest extends ContextTestSupport {

    @Test
    public void testAdviceWithPolicy() throws Exception {
        // Advise the first (and only) route: weave an extra mock endpoint in
        // directly after the node with id "b".
        RouteDefinition adviced = context.getRouteDefinitions().get(0);
        adviced.adviceWith(context, new AdviceWithRouteBuilder() {
            @Override
            public void configure() throws Exception {
                weaveById("b").after().to("mock:result");
            }
        });

        context.start();

        // Each endpoint, including the woven-in one, should see exactly one message.
        getMockEndpoint("mock:a").expectedMessageCount(1);
        getMockEndpoint("mock:b").expectedMessageCount(1);
        getMockEndpoint("mock:result").expectedMessageCount(1);

        template.sendBody("direct:start", "Hello World");

        assertMockEndpointsSatisfied();
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:start")
                    .policy(new MyPolicy())
                    .to("mock:a").id("a")
                    .to("mock:b").id("b");
            }
        };
    }

    /** No-op policy, used to verify that advice-with works on a policy-wrapped route. */
    private static final class MyPolicy implements Policy {

        @Override
        public void beforeWrap(RouteContext routeContext, ProcessorDefinition<?> definition) {
            // noop
        }

        @Override
        public Processor wrap(RouteContext routeContext, Processor processor) {
            // returns the processor unchanged: the policy does not decorate anything
            return processor;
        }
    }
}
| apache-2.0 |
brettwooldridge/buck | src/com/facebook/buck/core/config/ConfigViewCache.java | 3405 | /*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.core.config;
import com.facebook.buck.util.function.ThrowingFunction;
import com.google.common.base.Throwables;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.util.concurrent.UncheckedExecutionException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
/**
* A cache for views of some Config.
*
* <p>This class is useful if the ConfigViews memoize certain values that may be expensive to
* compute.
*
* @param <T> Config type
*/
public final class ConfigViewCache<T> {

  /** Lazily populated views keyed by view class; each view is constructed at most once. */
  private final LoadingCache<Class<? extends ConfigView<T>>, ? extends ConfigView<T>> cache;

  public ConfigViewCache(T delegate, Class<T> clazz) {
    this.cache = CacheBuilder.newBuilder().build(new ConfigViewCacheLoader<>(delegate, clazz));
  }

  /**
   * Returns the cached view of the requested class, creating and memoizing it on first access.
   *
   * @param viewClass concrete {@link ConfigView} type to obtain
   * @return the (possibly cached) view instance
   */
  public <V extends ConfigView<T>> V getView(Class<V> viewClass) {
    try {
      return viewClass.cast(cache.getUnchecked(viewClass));
    } catch (UncheckedExecutionException e) {
      // Unwrap the loader failure so callers see the original unchecked exception.
      Throwables.throwIfUnchecked(e.getCause());
      throw e;
    }
  }

  /** Creates views via either a static factory method {@code of(Config)} or a 1-arg constructor. */
  private static class ConfigViewCacheLoader<T>
      extends CacheLoader<Class<? extends ConfigView<T>>, ConfigView<T>> {
    private final T delegate;
    private final Class<T> clazz;

    private ConfigViewCacheLoader(T delegate, Class<T> clazz) {
      this.delegate = delegate;
      this.clazz = clazz;
    }

    @Override
    public ConfigView<T> load(Class<? extends ConfigView<T>> key) {
      // Prefer a static factory `of(<config type>)`; fall back to a public constructor.
      ThrowingFunction<T, ConfigView<T>, Exception> creator;
      try {
        Method builderMethod = key.getMethod("of", this.delegate.getClass());
        creator = config -> key.cast(builderMethod.invoke(null, config));
      } catch (NoSuchMethodException e) {
        try {
          Constructor<? extends ConfigView<T>> constructor = key.getConstructor(clazz);
          creator = constructor::newInstance;
        } catch (NoSuchMethodException e1) {
          IllegalStateException failure =
              new IllegalStateException(
                  "missing factory method of(Config) or constructor for config view", e);
          // Bug fix: keep the constructor-lookup failure as well instead of silently
          // dropping it — both lookups failed and both are relevant diagnostics.
          failure.addSuppressed(e1);
          throw failure;
        }
      }
      try {
        return creator.apply(delegate);
      } catch (InvocationTargetException e) {
        throw new IllegalStateException("ConfigView creator should not throw.", e);
      } catch (IllegalAccessException e) {
        throw new IllegalStateException("ConfigView creator should be public.", e);
      } catch (ClassCastException e) {
        throw new IllegalStateException(
            "ConfigView creator should create correct ConfigView instance.", e);
      } catch (Exception e) {
        throw new IllegalStateException("Error creating ConfigView.", e);
      }
    }
  }
}
| apache-2.0 |
mareknovotny/pnc | build-coordinator/src/test/java/org/jboss/pnc/core/test/mock/RepositorySessionMock.java | 3369 | /**
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.pnc.core.test.mock;
import org.jboss.pnc.model.Artifact;
import org.jboss.pnc.model.RepositoryType;
import org.jboss.pnc.spi.repositorymanager.RepositoryManagerException;
import org.jboss.pnc.spi.repositorymanager.RepositoryManagerResult;
import org.jboss.pnc.spi.repositorymanager.model.RepositoryConnectionInfo;
import org.jboss.pnc.spi.repositorymanager.model.RepositorySession;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Created by <a href="mailto:matejonnet@gmail.com">Matej Lazar</a> on 2015-02-02.
*/
/**
 * Test double for {@link RepositorySession}: serves fixed URLs and canned artifacts.
 */
public class RepositorySessionMock implements RepositorySession {

    @Override
    public RepositoryType getType() {
        return RepositoryType.MAVEN;
    }

    @Override
    public String getBuildRepositoryId() {
        return "test";
    }

    @Override
    public RepositoryConnectionInfo getConnectionInfo() {
        return new RepositoryConnectionInfo() {
            // TODO: This is not connected to anything...
            String repo = "http://localhost:8090/api/groups/test";

            @Override
            public String getToolchainUrl() {
                return repo;
            }

            @Override
            public Map<String, String> getProperties() {
                Map<String, String> properties = new HashMap<>();
                properties.put("altDeploymentRepository", "test::default::" + repo);
                return properties;
            }

            @Override
            public String getDependencyUrl() {
                return repo;
            }

            @Override
            public String getDeployUrl() {
                return repo;
            }
        };
    }

    @Override
    public RepositoryManagerResult extractBuildArtifacts() throws RepositoryManagerException {
        return new RepositoryManagerResult() {
            @Override
            public List<Artifact> getBuiltArtifacts() {
                List<Artifact> result = new ArrayList<>();
                result.add(createArtifact(1));
                return result;
            }

            @Override
            public List<Artifact> getDependencies() {
                List<Artifact> result = new ArrayList<>();
                result.add(createArtifact(10));
                return result;
            }
        };
    }

    /** Builds a dummy artifact whose id and identifier are derived from the given number. */
    private Artifact createArtifact(int number) {
        Artifact artifact = new Artifact();
        artifact.setId(number);
        artifact.setIdentifier("test" + number);
        return artifact;
    }

    @Override
    public String getBuildSetRepositoryId() {
        return null;
    }
}
| apache-2.0 |
jacklotusho/Terasology | engine/src/main/java/org/terasology/persistence/typeHandling/mathTypes/Rect2iTypeHandler.java | 2250 | /*
* Copyright 2013 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.persistence.typeHandling.mathTypes;
import com.google.common.collect.Maps;
import org.terasology.math.Rect2i;
import org.terasology.math.Vector2i;
import org.terasology.persistence.typeHandling.DeserializationContext;
import org.terasology.persistence.typeHandling.PersistedData;
import org.terasology.persistence.typeHandling.PersistedDataMap;
import org.terasology.persistence.typeHandling.SerializationContext;
import org.terasology.persistence.typeHandling.SimpleTypeHandler;
import java.util.Map;
/**
* @author Immortius
*/
/**
 * Serializes a {@link Rect2i} as a map with "min" and "size" vector fields,
 * and reconstructs it from the same representation.
 */
public class Rect2iTypeHandler extends SimpleTypeHandler<Rect2i> {

    private static final String MIN_FIELD = "min";
    private static final String SIZE_FIELD = "size";

    @Override
    public PersistedData serialize(Rect2i value, SerializationContext context) {
        // Null rectangles are persisted as an explicit null value.
        if (value == null) {
            return context.createNull();
        }
        Map<String, PersistedData> fields = Maps.newLinkedHashMap();
        fields.put(MIN_FIELD, context.create(value.min(), Vector2i.class));
        fields.put(SIZE_FIELD, context.create(value.size(), Vector2i.class));
        return context.create(fields);
    }

    @Override
    public Rect2i deserialize(PersistedData data, DeserializationContext context) {
        // Anything other than a value map cannot represent a rectangle.
        if (!data.isValueMap()) {
            return null;
        }
        PersistedDataMap fields = data.getAsValueMap();
        Vector2i min = context.deserializeAs(fields.get(MIN_FIELD), Vector2i.class);
        Vector2i size = context.deserializeAs(fields.get(SIZE_FIELD), Vector2i.class);
        return Rect2i.createFromMinAndSize(min, size);
    }
}
| apache-2.0 |
pleacu/jbpm | jbpm-human-task/jbpm-human-task-core/src/main/java/org/jbpm/services/task/commands/GetTasksForProcessCommand.java | 3137 | /*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.services.task.commands;
import org.jbpm.services.task.utils.ClassUtil;
import org.kie.api.runtime.Context;
import org.kie.api.task.model.Status;
import org.kie.api.task.model.TaskSummary;
import org.kie.internal.command.ProcessInstanceIdCommand;
import org.kie.internal.task.api.TaskContext;
import org.kie.internal.task.api.TaskPersistenceContext;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import java.util.List;
@XmlRootElement(name="get-tasks-for-process-command")
@XmlAccessorType(XmlAccessType.NONE)
public class GetTasksForProcessCommand extends TaskCommand<List<TaskSummary>> implements ProcessInstanceIdCommand {

    private static final long serialVersionUID = -3784821014329573243L;

    @XmlElement(name="process-instance-id")
    @XmlSchemaType(name="long")
    private Long processInstanceId;

    @XmlElement
    private List<Status> statuses;

    @XmlElement(name="language")
    @XmlSchemaType(name="string")
    private String language;

    public GetTasksForProcessCommand() {
        // default constructor required for JAXB marshalling
    }

    public GetTasksForProcessCommand(Long processInstanceId, List<Status> statuses, String language) {
        this.processInstanceId = processInstanceId;
        this.statuses = statuses;
        this.language = language;
    }

    @Override
    public Long getProcessInstanceId() {
        return processInstanceId;
    }

    @Override
    public void setProcessInstanceId(Long processInstanceId) {
        this.processInstanceId = processInstanceId;
    }

    public List<Status> getStatuses() {
        return statuses;
    }

    public void setStatuses(List<Status> statuses) {
        this.statuses = statuses;
    }

    public String getLanguage() {
        return language;
    }

    public void setLanguage(String language) {
        this.language = language;
    }

    /**
     * Runs the "TasksByStatusByProcessId" query against the task persistence
     * context, filtering by the configured process instance id and statuses.
     */
    @Override
    public List<TaskSummary> execute(Context context) {
        TaskContext taskContext = (TaskContext) context;
        TaskPersistenceContext persistenceContext = taskContext.getPersistenceContext();
        return (List<TaskSummary>) persistenceContext.queryWithParametersInTransaction(
                "TasksByStatusByProcessId",
                persistenceContext.addParametersToMap(
                        "processInstanceId", processInstanceId,
                        "status", statuses),
                ClassUtil.<List<TaskSummary>>castClass(List.class));
    }
}
| apache-2.0 |
mariofusco/optaplanner | optaplanner-examples/src/main/java/org/optaplanner/examples/nurserostering/solver/move/EmployeeChangeMove.java | 2870 | /*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.examples.nurserostering.solver.move;
import java.util.Collection;
import java.util.Collections;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.optaplanner.core.impl.move.Move;
import org.optaplanner.core.impl.score.director.ScoreDirector;
import org.optaplanner.examples.nurserostering.domain.Employee;
import org.optaplanner.examples.nurserostering.domain.ShiftAssignment;
/**
 * A move that assigns a single {@link ShiftAssignment} to a different {@link Employee}.
 */
public class EmployeeChangeMove implements Move {

    // Both fields are assigned only in the constructor; made final for immutability.
    private final ShiftAssignment shiftAssignment;
    private final Employee toEmployee;

    public EmployeeChangeMove(ShiftAssignment shiftAssignment, Employee toEmployee) {
        this.shiftAssignment = shiftAssignment;
        this.toEmployee = toEmployee;
    }

    /** A move to the employee already assigned would be a no-op, so it is not doable. */
    public boolean isMoveDoable(ScoreDirector scoreDirector) {
        return !ObjectUtils.equals(shiftAssignment.getEmployee(), toEmployee);
    }

    /** The undo move simply assigns back to the current employee. */
    public Move createUndoMove(ScoreDirector scoreDirector) {
        return new EmployeeChangeMove(shiftAssignment, shiftAssignment.getEmployee());
    }

    public void doMove(ScoreDirector scoreDirector) {
        NurseRosteringMoveHelper.moveEmployee(scoreDirector, shiftAssignment, toEmployee);
    }

    public Collection<? extends Object> getPlanningEntities() {
        return Collections.singletonList(shiftAssignment);
    }

    public Collection<? extends Object> getPlanningValues() {
        return Collections.singletonList(toEmployee);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        } else if (o instanceof EmployeeChangeMove) {
            EmployeeChangeMove other = (EmployeeChangeMove) o;
            return new EqualsBuilder()
                    .append(shiftAssignment, other.shiftAssignment)
                    .append(toEmployee, other.toEmployee)
                    .isEquals();
        } else {
            return false;
        }
    }

    @Override
    public int hashCode() {
        // Must stay consistent with equals(): built from the same two fields.
        return new HashCodeBuilder()
                .append(shiftAssignment)
                .append(toEmployee)
                .toHashCode();
    }

    @Override
    public String toString() {
        return shiftAssignment + " => " + toEmployee;
    }
}
| apache-2.0 |
tufangorel/hazelcast | hazelcast/src/test/java/com/hazelcast/cache/merge/CacheSplitBrainTest.java | 15680 | /*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.cache.merge;
import com.hazelcast.cache.ICache;
import com.hazelcast.config.Config;
import com.hazelcast.config.InMemoryFormat;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.spi.merge.DiscardMergePolicy;
import com.hazelcast.spi.merge.HigherHitsMergePolicy;
import com.hazelcast.spi.merge.LatestAccessMergePolicy;
import com.hazelcast.spi.merge.LatestUpdateMergePolicy;
import com.hazelcast.spi.merge.PassThroughMergePolicy;
import com.hazelcast.spi.merge.PutIfAbsentMergePolicy;
import com.hazelcast.spi.merge.SplitBrainMergePolicy;
import com.hazelcast.test.HazelcastParallelParametersRunnerFactory;
import com.hazelcast.test.SplitBrainTestSupport;
import com.hazelcast.test.annotation.ParallelTest;
import com.hazelcast.test.annotation.QuickTest;
import com.hazelcast.test.backup.BackupAccessor;
import com.hazelcast.test.backup.TestBackupUtils;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
import org.junit.runners.Parameterized.UseParametersRunnerFactory;
import java.util.Collection;
import static com.hazelcast.config.InMemoryFormat.BINARY;
import static com.hazelcast.config.InMemoryFormat.OBJECT;
import static com.hazelcast.test.backup.TestBackupUtils.assertBackupEntryEqualsEventually;
import static com.hazelcast.test.backup.TestBackupUtils.assertBackupEntryNullEventually;
import static com.hazelcast.test.backup.TestBackupUtils.assertBackupSizeEventually;
import static java.util.Arrays.asList;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;
/**
* Tests different split-brain scenarios for {@link com.hazelcast.cache.ICache}.
* <p>
* Most merge policies are tested with {@link InMemoryFormat#BINARY} only, since they don't check the value.
* <p>
* The {@link MergeIntegerValuesMergePolicy} is tested with both in-memory formats, since it's using the value to merge.
* <p>
* The {@link DiscardMergePolicy}, {@link PassThroughMergePolicy} and {@link PutIfAbsentMergePolicy} are also
* tested with a data structure, which is only created in the smaller cluster.
*/
@RunWith(Parameterized.class)
@UseParametersRunnerFactory(HazelcastParallelParametersRunnerFactory.class)
@Category({QuickTest.class, ParallelTest.class})
@SuppressWarnings("WeakerAccess")
public class CacheSplitBrainTest extends SplitBrainTestSupport {

    @Parameters(name = "format:{0}, mergePolicy:{1}")
    public static Collection<Object[]> parameters() {
        // NOTE(review): LatestUpdateMergePolicy is dispatched on below but is not in
        // this list — presumably exercised via subclasses or a follow-up; confirm.
        return asList(new Object[][]{
                {BINARY, DiscardMergePolicy.class},
                {BINARY, HigherHitsMergePolicy.class},
                {BINARY, LatestAccessMergePolicy.class},
                {BINARY, PassThroughMergePolicy.class},
                {BINARY, PutIfAbsentMergePolicy.class},
                {BINARY, MergeIntegerValuesMergePolicy.class},
                {OBJECT, MergeIntegerValuesMergePolicy.class},
        });
    }

    @Parameter
    public InMemoryFormat inMemoryFormat;

    @Parameter(value = 1)
    public Class<? extends SplitBrainMergePolicy> mergePolicyClass;

    // Cache A exists in both brains before the split; cache B is created only in the
    // smaller (second) brain during the split.
    protected String cacheNameA = randomMapName("cacheA-");
    protected String cacheNameB = randomMapName("cacheB-");
    // Suffix 1 = first brain's proxy, suffix 2 = second brain's proxy.
    protected ICache<Object, Object> cacheA1;
    protected ICache<Object, Object> cacheA2;
    protected ICache<Object, Object> cacheB1;
    protected ICache<Object, Object> cacheB2;
    // Accessors for the backup replicas, populated after the brains heal.
    protected BackupAccessor<Object, Object> backupCacheA;
    protected BackupAccessor<Object, Object> backupCacheB;
    protected MergeLifecycleListener mergeLifecycleListener;

    @Override
    protected Config config() {
        // Both caches: one sync backup, no async backups, stats off, and the
        // parameterized merge policy under test.
        Config config = super.config();
        config.getCacheConfig(cacheNameA)
                .setInMemoryFormat(inMemoryFormat)
                .setBackupCount(1)
                .setAsyncBackupCount(0)
                .setStatisticsEnabled(false)
                .setMergePolicy(mergePolicyClass.getName());
        config.getCacheConfig(cacheNameB)
                .setInMemoryFormat(inMemoryFormat)
                .setBackupCount(1)
                .setAsyncBackupCount(0)
                .setStatisticsEnabled(false)
                .setMergePolicy(mergePolicyClass.getName());
        return config;
    }

    @Override
    protected void onBeforeSplitBrainCreated(HazelcastInstance[] instances) {
        // Sanity check: cache A starts empty before the network is split.
        waitAllForSafeState(instances);
        BackupAccessor<Object, Object> accessor = TestBackupUtils.newCacheAccessor(instances, cacheNameA);
        assertEquals("backupCache should contain 0 entries", 0, accessor.size());
    }

    @Override
    protected void onAfterSplitBrainCreated(HazelcastInstance[] firstBrain, HazelcastInstance[] secondBrain) {
        // The listener lets onAfterSplitBrainHealed() wait until all second-brain
        // members have finished merging.
        mergeLifecycleListener = new MergeLifecycleListener(secondBrain.length);
        for (HazelcastInstance instance : secondBrain) {
            instance.getLifecycleService().addLifecycleListener(mergeLifecycleListener);
        }
        cacheA1 = firstBrain[0].getCacheManager().getCache(cacheNameA);
        cacheA2 = secondBrain[0].getCacheManager().getCache(cacheNameA);
        cacheB2 = secondBrain[0].getCacheManager().getCache(cacheNameB);
        // Dispatch to the scenario matching the merge policy under test.
        if (mergePolicyClass == DiscardMergePolicy.class) {
            afterSplitDiscardMergePolicy();
        } else if (mergePolicyClass == HigherHitsMergePolicy.class) {
            afterSplitHigherHitsMergePolicy();
        } else if (mergePolicyClass == LatestAccessMergePolicy.class) {
            afterSplitLatestAccessMergePolicy();
        } else if (mergePolicyClass == LatestUpdateMergePolicy.class) {
            afterSplitLatestUpdateMergePolicy();
        } else if (mergePolicyClass == PassThroughMergePolicy.class) {
            afterSplitPassThroughMergePolicy();
        } else if (mergePolicyClass == PutIfAbsentMergePolicy.class) {
            afterSplitPutIfAbsentMergePolicy();
        } else if (mergePolicyClass == MergeIntegerValuesMergePolicy.class) {
            afterSplitCustomMergePolicy();
        } else {
            fail();
        }
    }

    @Override
    protected void onAfterSplitBrainHealed(HazelcastInstance[] instances) {
        // wait until merge completes
        mergeLifecycleListener.await();
        cacheB1 = instances[0].getCacheManager().getCache(cacheNameB);
        backupCacheA = TestBackupUtils.newCacheAccessor(instances, cacheNameA);
        backupCacheB = TestBackupUtils.newCacheAccessor(instances, cacheNameB);
        // Verify the post-merge state expected for the merge policy under test.
        if (mergePolicyClass == DiscardMergePolicy.class) {
            afterMergeDiscardMergePolicy();
        } else if (mergePolicyClass == HigherHitsMergePolicy.class) {
            afterMergeHigherHitsMergePolicy();
        } else if (mergePolicyClass == LatestAccessMergePolicy.class) {
            afterMergeLatestAccessMergePolicy();
        } else if (mergePolicyClass == LatestUpdateMergePolicy.class) {
            afterMergeLatestUpdateMergePolicy();
        } else if (mergePolicyClass == PassThroughMergePolicy.class) {
            afterMergePassThroughMergePolicy();
        } else if (mergePolicyClass == PutIfAbsentMergePolicy.class) {
            afterMergePutIfAbsentMergePolicy();
        } else if (mergePolicyClass == MergeIntegerValuesMergePolicy.class) {
            afterMergeCustomMergePolicy();
        } else {
            fail();
        }
    }

    // Conflicting writes: the second brain's entries must be discarded on merge.
    private void afterSplitDiscardMergePolicy() {
        cacheA1.put("key1", "value1");

        cacheA2.put("key1", "DiscardedValue1");
        cacheA2.put("key2", "DiscardedValue2");

        cacheB2.put("key", "DiscardedValue");
    }

    private void afterMergeDiscardMergePolicy() {
        assertEquals("value1", cacheA1.get("key1"));
        assertEquals("value1", cacheA2.get("key1"));
        assertBackupEntryEqualsEventually("key1", "value1", backupCacheA);

        assertNull(cacheA1.get("key2"));
        assertNull(cacheA2.get("key2"));
        assertBackupEntryNullEventually("key2", backupCacheA);

        assertEquals(1, cacheA1.size());
        assertEquals(1, cacheA2.size());
        assertBackupSizeEventually(1, backupCacheA);

        // Cache B only ever existed in the discarded brain, so it merges back empty.
        assertNull(cacheB1.get("key"));
        assertNull(cacheB2.get("key"));
        assertBackupEntryNullEventually("key", backupCacheB);

        assertEquals(0, cacheB1.size());
        assertEquals(0, cacheB2.size());
        assertBackupSizeEventually(0, backupCacheB);
    }

    // Each brain reads its "higherHits" entry twice so that entry wins the merge.
    private void afterSplitHigherHitsMergePolicy() {
        cacheA1.put("key1", "higherHitsValue1");
        cacheA1.put("key2", "value2");

        // increase hits number
        assertEquals("higherHitsValue1", cacheA1.get("key1"));
        assertEquals("higherHitsValue1", cacheA1.get("key1"));

        cacheA2.put("key1", "value1");
        cacheA2.put("key2", "higherHitsValue2");

        // increase hits number
        assertEquals("higherHitsValue2", cacheA2.get("key2"));
        assertEquals("higherHitsValue2", cacheA2.get("key2"));
    }

    private void afterMergeHigherHitsMergePolicy() {
        assertEquals("higherHitsValue1", cacheA1.get("key1"));
        assertEquals("higherHitsValue1", cacheA2.get("key1"));
        assertBackupEntryEqualsEventually("key1", "higherHitsValue1", backupCacheA);

        assertEquals("higherHitsValue2", cacheA1.get("key2"));
        assertEquals("higherHitsValue2", cacheA2.get("key2"));
        assertBackupEntryEqualsEventually("key2", "higherHitsValue2", backupCacheA);

        assertEquals(2, cacheA1.size());
        assertEquals(2, cacheA2.size());
        assertBackupSizeEventually(2, backupCacheA);
    }

    // The later-accessed value must win; sleeps keep access timestamps distinct.
    private void afterSplitLatestAccessMergePolicy() {
        cacheA1.put("key1", "value1");
        // access to record
        assertEquals("value1", cacheA1.get("key1"));

        // prevent updating at the same time
        sleepAtLeastMillis(100);

        cacheA2.put("key1", "LatestAccessedValue1");
        // access to record
        assertEquals("LatestAccessedValue1", cacheA2.get("key1"));

        cacheA2.put("key2", "value2");
        // access to record
        assertEquals("value2", cacheA2.get("key2"));

        // prevent updating at the same time
        sleepAtLeastMillis(100);

        cacheA1.put("key2", "LatestAccessedValue2");
        // access to record
        assertEquals("LatestAccessedValue2", cacheA1.get("key2"));
    }

    private void afterMergeLatestAccessMergePolicy() {
        assertEquals("LatestAccessedValue1", cacheA1.get("key1"));
        assertEquals("LatestAccessedValue1", cacheA2.get("key1"));
        assertBackupEntryEqualsEventually("key1", "LatestAccessedValue1", backupCacheA);

        assertEquals("LatestAccessedValue2", cacheA1.get("key2"));
        assertEquals("LatestAccessedValue2", cacheA2.get("key2"));
        assertBackupEntryEqualsEventually("key2", "LatestAccessedValue2", backupCacheA);

        assertEquals(2, cacheA1.size());
        assertEquals(2, cacheA2.size());
        assertBackupSizeEventually(2, backupCacheA);
    }

    // The later-updated value must win; sleeps keep update timestamps distinct.
    private void afterSplitLatestUpdateMergePolicy() {
        cacheA1.put("key1", "value1");

        // prevent updating at the same time
        sleepAtLeastMillis(100);

        cacheA2.put("key1", "LatestUpdatedValue1");
        cacheA2.put("key2", "value2");

        // prevent updating at the same time
        sleepAtLeastMillis(100);

        cacheA1.put("key2", "LatestUpdatedValue2");
    }

    private void afterMergeLatestUpdateMergePolicy() {
        assertEquals("LatestUpdatedValue1", cacheA1.get("key1"));
        assertEquals("LatestUpdatedValue1", cacheA2.get("key1"));
        assertBackupEntryEqualsEventually("key1", "LatestUpdatedValue1", backupCacheA);

        assertEquals("LatestUpdatedValue2", cacheA1.get("key2"));
        assertEquals("LatestUpdatedValue2", cacheA2.get("key2"));
        assertBackupEntryEqualsEventually("key2", "LatestUpdatedValue2", backupCacheA);

        assertEquals(2, cacheA1.size());
        assertEquals(2, cacheA2.size());
        assertBackupSizeEventually(2, backupCacheA);
    }

    // The merging (second) brain's values always replace the first brain's.
    private void afterSplitPassThroughMergePolicy() {
        cacheA1.put("key1", "value1");

        cacheA2.put("key1", "PassThroughValue1");
        cacheA2.put("key2", "PassThroughValue2");

        cacheB2.put("key", "PutIfAbsentValue");
    }

    private void afterMergePassThroughMergePolicy() {
        assertEquals("PassThroughValue1", cacheA1.get("key1"));
        assertEquals("PassThroughValue1", cacheA2.get("key1"));
        assertBackupEntryEqualsEventually("key1", "PassThroughValue1", backupCacheA);

        assertEquals("PassThroughValue2", cacheA1.get("key2"));
        assertEquals("PassThroughValue2", cacheA2.get("key2"));
        assertBackupEntryEqualsEventually("key2", "PassThroughValue2", backupCacheA);

        assertEquals(2, cacheA1.size());
        assertEquals(2, cacheA2.size());
        assertBackupSizeEventually(2, backupCacheA);

        // Cache B was created only in the second brain and passes through intact.
        assertEquals("PutIfAbsentValue", cacheB1.get("key"));
        assertEquals("PutIfAbsentValue", cacheB2.get("key"));
        assertBackupEntryEqualsEventually("key", "PutIfAbsentValue", backupCacheB);

        assertEquals(1, cacheB1.size());
        assertEquals(1, cacheB2.size());
        assertBackupSizeEventually(1, backupCacheB);
    }

    // Merging values are applied only where the first brain has no entry yet.
    private void afterSplitPutIfAbsentMergePolicy() {
        cacheA1.put("key1", "PutIfAbsentValue1");

        cacheA2.put("key1", "value");
        cacheA2.put("key2", "PutIfAbsentValue2");

        cacheB2.put("key", "PutIfAbsentValue");
    }

    private void afterMergePutIfAbsentMergePolicy() {
        assertEquals("PutIfAbsentValue1", cacheA1.get("key1"));
        assertEquals("PutIfAbsentValue1", cacheA2.get("key1"));
        assertBackupEntryEqualsEventually("key1", "PutIfAbsentValue1", backupCacheA);

        assertEquals("PutIfAbsentValue2", cacheA1.get("key2"));
        assertEquals("PutIfAbsentValue2", cacheA2.get("key2"));
        assertBackupEntryEqualsEventually("key2", "PutIfAbsentValue2", backupCacheA);

        assertEquals(2, cacheA1.size());
        assertEquals(2, cacheA2.size());
        assertBackupSizeEventually(2, backupCacheA);

        assertEquals("PutIfAbsentValue", cacheB1.get("key"));
        assertEquals("PutIfAbsentValue", cacheB2.get("key"));
        assertBackupEntryEqualsEventually("key", "PutIfAbsentValue", backupCacheB);

        assertEquals(1, cacheB1.size());
        assertEquals(1, cacheB2.size());
        assertBackupSizeEventually(1, backupCacheB);
    }

    // Custom policy keeps only integer values, so the Integer from the second
    // brain replaces the String from the first.
    private void afterSplitCustomMergePolicy() {
        cacheA1.put("key", "value");
        cacheA2.put("key", 1);
    }

    private void afterMergeCustomMergePolicy() {
        assertEquals(1, cacheA1.get("key"));
        assertEquals(1, cacheA2.get("key"));
        assertBackupEntryEqualsEventually("key", 1, backupCacheA);

        assertEquals(1, cacheA1.size());
        assertEquals(1, cacheA2.size());
        assertBackupSizeEventually(1, backupCacheA);
    }
}
| apache-2.0 |
mosoft521/guava | guava-testlib/src/com/google/common/collect/testing/FeatureSpecificTestSuiteBuilder.java | 10239 | /*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect.testing;
import static java.util.Collections.disjoint;
import static java.util.logging.Level.FINER;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.collect.testing.features.ConflictingRequirementsException;
import com.google.common.collect.testing.features.Feature;
import com.google.common.collect.testing.features.FeatureUtil;
import com.google.common.collect.testing.features.TesterRequirements;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Logger;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
/**
* Creates, based on your criteria, a JUnit test suite that exhaustively tests the object generated
* by a G, selecting appropriate tests by matching them against specified features.
*
* @param <B> The concrete type of this builder (the 'self-type'). All the Builder methods of this
* class (such as {@link #named}) return this type, so that Builder methods of more derived
* classes can be chained onto them without casting.
* @param <G> The type of the generator to be passed to testers in the generated test suite. An
* instance of G should somehow provide an instance of the class under test, plus any other
* information required to parameterize the test.
* @author George van den Driessche
*/
@GwtIncompatible
public abstract class FeatureSpecificTestSuiteBuilder<
    B extends FeatureSpecificTestSuiteBuilder<B, G>, G> {

  /** Returns this builder typed as the concrete self-type {@code B}, for fluent chaining. */
  @SuppressWarnings("unchecked")
  protected B self() {
    return (B) this;
  }

  // Test Data

  // Generator that supplies the subject under test (and its parameterization) to each tester.
  private G subjectGenerator;
  // Gets run before every test.
  private Runnable setUp;
  // Gets run at the conclusion of every test.
  private Runnable tearDown;

  /** Sets the generator used to produce subjects under test; returns this builder. */
  protected B usingGenerator(G subjectGenerator) {
    this.subjectGenerator = subjectGenerator;
    return self();
  }

  /** Returns the generator previously set via {@link #usingGenerator}, or null if unset. */
  public G getSubjectGenerator() {
    return subjectGenerator;
  }

  /** Registers a {@code Runnable} to run before every test; returns this builder. */
  public B withSetUp(Runnable setUp) {
    this.setUp = setUp;
    return self();
  }

  public Runnable getSetUp() {
    return setUp;
  }

  /** Registers a {@code Runnable} to run after every test; returns this builder. */
  public B withTearDown(Runnable tearDown) {
    this.tearDown = tearDown;
    return self();
  }

  public Runnable getTearDown() {
    return tearDown;
  }

  // Features

  // Insertion-ordered accumulation of the features declared for the subject under test.
  private final Set<Feature<?>> features = new LinkedHashSet<>();

  /**
   * Configures this builder to produce tests appropriate for the given features. This method may be
   * called more than once to add features in multiple groups.
   */
  public B withFeatures(Feature<?>... features) {
    return withFeatures(Arrays.asList(features));
  }

  public B withFeatures(Iterable<? extends Feature<?>> features) {
    for (Feature<?> feature : features) {
      this.features.add(feature);
    }
    return self();
  }

  /** Returns an unmodifiable view of the features accumulated so far. */
  public Set<Feature<?>> getFeatures() {
    return Collections.unmodifiableSet(features);
  }

  // Name

  private String name;

  /** Configures this builder produce a TestSuite with the given name. */
  public B named(String name) {
    // '(' is rejected because the Eclipse JUnit runner hides everything after a parenthesis.
    if (name.contains("(")) {
      throw new IllegalArgumentException(
          "Eclipse hides all characters after "
              + "'('; please use '[]' or other characters instead of parentheses");
    }
    this.name = name;
    return self();
  }

  public String getName() {
    return name;
  }

  // Test suppression

  // Tester methods that must be excluded from the generated suite.
  private final Set<Method> suppressedTests = new HashSet<>();

  /**
   * Prevents the given methods from being run as part of the test suite.
   *
   * <p><em>Note:</em> in principle this should never need to be used, but it might be useful if the
   * semantics of an implementation disagree in unforeseen ways with the semantics expected by a
   * test, or to keep dependent builds clean in spite of an erroneous test.
   */
  public B suppressing(Method... methods) {
    return suppressing(Arrays.asList(methods));
  }

  public B suppressing(Collection<Method> methods) {
    suppressedTests.addAll(methods);
    return self();
  }

  public Set<Method> getSuppressedTests() {
    return suppressedTests;
  }

  private static final Logger logger =
      Logger.getLogger(FeatureSpecificTestSuiteBuilder.class.getName());

  /** Creates a runnable JUnit test suite based on the criteria already given. */
  /*
   * Class parameters must be raw. This annotation should go on testerClass in
   * the for loop, but the 1.5 javac crashes on annotations in for loops:
   * <http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6294589>
   */
  @SuppressWarnings("unchecked")
  public TestSuite createTestSuite() {
    checkCanCreate();

    logger.fine(" Testing: " + name);
    logger.fine("Features: " + formatFeatureSet(features));
    // Expand the declared features with everything they imply before matching testers.
    FeatureUtil.addImpliedFeatures(features);
    logger.fine("Expanded: " + formatFeatureSet(features));

    // Class parameters must be raw.
    List<Class<? extends AbstractTester>> testers = getTesters();

    TestSuite suite = new TestSuite(name);
    for (Class<? extends AbstractTester> testerClass : testers) {
      TestSuite testerSuite =
          makeSuiteForTesterClass((Class<? extends AbstractTester<?>>) testerClass);
      // Only include tester classes that contributed at least one matching test.
      if (testerSuite.countTestCases() > 0) {
        suite.addTest(testerSuite);
      }
    }
    return suite;
  }

  /** Throw {@link IllegalStateException} if {@link #createTestSuite()} can't be called yet. */
  protected void checkCanCreate() {
    if (subjectGenerator == null) {
      throw new IllegalStateException("Call using() before createTestSuite().");
    }
    if (name == null) {
      throw new IllegalStateException("Call named() before createTestSuite().");
    }
    // NOTE(review): 'features' is final and initialized inline, so this null check
    // appears unreachable — confirm before removing.
    if (features == null) {
      throw new IllegalStateException("Call withFeatures() before createTestSuite().");
    }
  }

  // Class parameters must be raw.
  protected abstract List<Class<? extends AbstractTester>> getTesters();

  /**
   * Decides whether a candidate test should be included in the suite, based on
   * suppression and on the feature requirements declared by its test method.
   */
  private boolean matches(Test test) {
    Method method;
    try {
      method = extractMethod(test);
    } catch (IllegalArgumentException e) {
      // Not a recognizable test case; include it rather than silently drop it.
      logger.finer(Platform.format("%s: including by default: %s", test, e.getMessage()));
      return true;
    }
    if (suppressedTests.contains(method)) {
      logger.finer(Platform.format("%s: excluding because it was explicitly suppressed.", test));
      return false;
    }
    TesterRequirements requirements;
    try {
      requirements = FeatureUtil.getTesterRequirements(method);
    } catch (ConflictingRequirementsException e) {
      throw new RuntimeException(e);
    }
    // Exclude when a required feature is absent from the configured set...
    if (!features.containsAll(requirements.getPresentFeatures())) {
      if (logger.isLoggable(FINER)) {
        Set<Feature<?>> missingFeatures = Helpers.copyToSet(requirements.getPresentFeatures());
        missingFeatures.removeAll(features);
        logger.finer(
            Platform.format(
                "%s: skipping because these features are absent: %s", method, missingFeatures));
      }
      return false;
    }
    // ...or when a feature the tester requires to be absent is present.
    if (intersect(features, requirements.getAbsentFeatures())) {
      if (logger.isLoggable(FINER)) {
        Set<Feature<?>> unwantedFeatures = Helpers.copyToSet(requirements.getAbsentFeatures());
        unwantedFeatures.retainAll(features);
        logger.finer(
            Platform.format(
                "%s: skipping because these features are present: %s", method, unwantedFeatures));
      }
      return false;
    }
    return true;
  }

  /** Returns whether the two sets share at least one element. */
  private static boolean intersect(Set<?> a, Set<?> b) {
    return !disjoint(a, b);
  }

  /**
   * Reflectively recovers the test method behind a JUnit 3 {@code Test}.
   *
   * @throws IllegalArgumentException if the test is not an AbstractTester or TestCase
   */
  private static Method extractMethod(Test test) {
    if (test instanceof AbstractTester) {
      AbstractTester<?> tester = (AbstractTester<?>) test;
      return Helpers.getMethod(tester.getClass(), tester.getTestMethodName());
    } else if (test instanceof TestCase) {
      TestCase testCase = (TestCase) test;
      return Helpers.getMethod(testCase.getClass(), testCase.getName());
    } else {
      throw new IllegalArgumentException("unable to extract method from test: not a TestCase.");
    }
  }

  /**
   * Builds the sub-suite for one tester class: filters its tests by feature
   * requirements, then injects the generator, suite name, and setUp/tearDown
   * hooks into every surviving tester instance.
   */
  protected TestSuite makeSuiteForTesterClass(Class<? extends AbstractTester<?>> testerClass) {
    TestSuite candidateTests = new TestSuite(testerClass);
    TestSuite suite = filterSuite(candidateTests);

    Enumeration<?> allTests = suite.tests();
    while (allTests.hasMoreElements()) {
      Object test = allTests.nextElement();
      if (test instanceof AbstractTester) {
        @SuppressWarnings("unchecked")
        AbstractTester<? super G> tester = (AbstractTester<? super G>) test;
        tester.init(subjectGenerator, name, setUp, tearDown);
      }
    }

    return suite;
  }

  /** Returns a copy of the suite containing only the tests accepted by {@link #matches}. */
  private TestSuite filterSuite(TestSuite suite) {
    TestSuite filtered = new TestSuite(suite.getName());
    Enumeration<?> tests = suite.tests();
    while (tests.hasMoreElements()) {
      Test test = (Test) tests.nextElement();
      if (matches(test)) {
        filtered.addTest(test);
      }
    }
    return filtered;
  }

  /** Renders a feature set for logging, qualifying enum features with their declaring class. */
  protected static String formatFeatureSet(Set<? extends Feature<?>> features) {
    List<String> temp = new ArrayList<>();
    for (Feature<?> feature : features) {
      Object featureAsObject = feature; // to work around bogus JDK warning
      if (featureAsObject instanceof Enum) {
        Enum<?> f = (Enum<?>) featureAsObject;
        temp.add(f.getDeclaringClass().getSimpleName() + "." + feature);
      } else {
        temp.add(feature.toString());
      }
    }
    return temp.toString();
  }
}
| apache-2.0 |
miniway/presto | presto-parser/src/main/java/io/prestosql/sql/tree/Table.java | 2055 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.sql.tree;
import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import static com.google.common.base.MoreObjects.toStringHelper;
public class Table
        extends QueryBody
{
    // Fully qualified name of the table this query body refers to.
    private final QualifiedName name;

    public Table(QualifiedName name)
    {
        this(Optional.empty(), name);
    }

    public Table(NodeLocation location, QualifiedName name)
    {
        this(Optional.of(location), name);
    }

    private Table(Optional<NodeLocation> location, QualifiedName name)
    {
        super(location);
        this.name = name;
    }

    /** Returns the qualified name of the referenced table. */
    public QualifiedName getName()
    {
        return name;
    }

    @Override
    public <R, C> R accept(AstVisitor<R, C> visitor, C context)
    {
        return visitor.visitTable(this, context);
    }

    @Override
    public List<Node> getChildren()
    {
        // A table reference is a leaf node of the AST.
        return ImmutableList.of();
    }

    @Override
    public String toString()
    {
        return toStringHelper(this).addValue(name).toString();
    }

    @Override
    public boolean equals(Object obj)
    {
        if (obj == this) {
            return true;
        }
        if ((obj == null) || (obj.getClass() != getClass())) {
            return false;
        }
        return Objects.equals(name, ((Table) obj).name);
    }

    @Override
    public int hashCode()
    {
        return name.hashCode();
    }
}
| apache-2.0 |
jorgemoralespou/rtgov | modules/event-processor-network/epn-core/src/main/java/org/overlord/rtgov/epn/EventList.java | 5358 | /*
* 2012-3 Red Hat Inc. and/or its affiliates and other contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.overlord.rtgov.epn;
import java.io.IOException;
import java.io.ObjectStreamClass;
import java.io.Serializable;
import java.util.Iterator;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* This class represents a list of events that can be serialized.
*
*/
/**
 * A serializable list of events. The contained list is serialized eagerly at
 * construction time so that it can later be deserialized ("resolved") under a
 * different classloader; until {@link #resolve} is called (or after
 * {@link #reset}), the in-memory list reference may be null.
 */
public class EventList implements java.io.Serializable, java.lang.Iterable<java.io.Serializable> {

    private static final long serialVersionUID = 4108141437156875407L;

    // Deserialized view of the events; null until resolved (or after reset()).
    private transient java.util.List<? extends java.io.Serializable> _list=null;
    // Classloader-independent serialized form of the list, created at construction.
    private byte[] _serializedList=null;

    private static final Logger LOG=Logger.getLogger(EventList.class.getName());

    /**
     * The default constructor.
     */
    public EventList() {
    }

    /**
     * This method represents a constructor to initialize the event list
     * from a standard Java list. The list is immediately serialized so it can
     * later be resolved under another classloader.
     *
     * @param list The list of events
     * @throws IllegalArgumentException if the list cannot be serialized
     */
    public EventList(java.util.List<? extends java.io.Serializable> list) {
        _list = list;

        try {
            java.io.ByteArrayOutputStream baos=new java.io.ByteArrayOutputStream();
            java.io.ObjectOutputStream oos=new java.io.ObjectOutputStream(baos);

            oos.writeObject(list);

            oos.close();
            baos.close();

            _serializedList = baos.toByteArray();

            if (LOG.isLoggable(Level.FINEST)) {
                LOG.finest("Serialized event list: "+_list);
            }
        } catch (Throwable e) {
            String mesg=java.util.PropertyResourceBundle.getBundle(
                    "epn-core.Messages").getString("EPN-CORE-3");
            LOG.severe(mesg);
            throw new IllegalArgumentException(mesg, e);
        }
    }

    /**
     * This method resolves the contained list, deserializing it with classes
     * loaded from the supplied classloader.
     *
     * @param cl The classloader
     * @throws IllegalArgumentException if deserialization fails
     */
    @SuppressWarnings("unchecked")
    protected void resolve(final java.lang.ClassLoader cl) {
        try {
            java.io.ByteArrayInputStream bais=new java.io.ByteArrayInputStream(_serializedList);
            java.io.ObjectInputStream ois=new java.io.ObjectInputStream(bais) {
                // Route class resolution through the supplied classloader.
                protected Class<?> resolveClass(ObjectStreamClass desc)
                    throws IOException, ClassNotFoundException {
                    return (Class.forName(desc.getName(), false, cl));
                }
            };

            _list = (java.util.List<? extends Serializable>)ois.readObject();

            if (LOG.isLoggable(Level.FINEST)) {
                LOG.finest("Deserialized event list: "+_list);
            }

            ois.close();
            bais.close();
        } catch (Throwable e) {
            String mesg=java.util.PropertyResourceBundle.getBundle(
                    "epn-core.Messages").getString("EPN-CORE-4");
            LOG.severe(mesg);
            throw new IllegalArgumentException(mesg, e);
        }
    }

    /**
     * This method resets the contents so they are no longer available
     * until resolved under another classloader.
     *
     */
    protected void reset() {
        _list = null;
    }

    /**
     * {@inheritDoc}
     */
    @SuppressWarnings("unchecked")
    public Iterator<Serializable> iterator() {
        if (_list != null) {
            return ((Iterator<Serializable>)_list.iterator());
        }

        if (LOG.isLoggable(Level.FINEST)) {
            LOG.finest("Event list is null");
        }

        // NOTE(review): returning null here violates the Iterable contract
        // (for-each would NPE on an unresolved list); kept as-is because
        // existing callers may check for null — confirm before changing.
        return (null);
    }

    /**
     * This method determines whether the event is contained
     * within the list. If the list has not been resolved (or has been
     * reset), no event is considered contained.
     *
     * @param evt The event
     * @return Whether the event is contained in the list
     */
    public boolean contains(Serializable evt) {
        // Fix: guard against an unresolved list, consistent with get()/size();
        // previously this threw a NullPointerException when _list was null.
        if (_list != null) {
            return (_list.contains(evt));
        }
        return (false);
    }

    /**
     * This method returns the event at the specified index, or null if the
     * list has not been resolved.
     *
     * @param index The index
     * @return The event
     * @throws IndexOutOfBoundsException Index is out of bounds
     */
    public Serializable get(int index) throws IndexOutOfBoundsException {
        if (_list != null) {
            return (_list.get(index));
        }
        return (null);
    }

    /**
     * This method returns the number of events, or 0 if the list has not
     * been resolved.
     *
     * @return The number of events
     */
    public int size() {
        if (_list != null) {
            return (_list.size());
        }
        return (0);
    }

    /**
     * {@inheritDoc}
     */
    public String toString() {
        return (_list == null ? "<Unresolved EventList>" : _list.toString());
    }
}
| apache-2.0 |
sdgdsffdsfff/drools-wb | drools-wb-screens/drools-wb-guided-dtable-editor/drools-wb-guided-dtable-editor-client/src/test/java/org/drools/workbench/screens/guided/dtable/client/widget/analysis/condition/NumericIntegerConditionInspectorSubsumptionTest.java | 15773 | /*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.workbench.screens.guided.dtable.client.widget.analysis.condition;
import org.drools.workbench.models.guided.dtable.shared.model.Pattern52;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
/**
 * Pairwise subsumption tests for {@code NumericIntegerConditionInspector} over
 * integer field conditions on "age". Each test builds two conditions a and b
 * and checks {@code a.subsumes(b)} and {@code b.subsumes(a)} in both
 * directions. (NOTE(review): in most cases the expectations read as
 * "x.subsumes(y) is true when x's value set covers y's over the integers",
 * e.g. "> 0" subsumes "> 10" — confirm the exact semantics against the
 * inspector implementation, as a few cases treat mutually redundant pairs
 * as subsuming each other.)
 */
public class NumericIntegerConditionInspectorSubsumptionTest {

    @Test
    public void testSubsume001() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "!=" );
        NumericIntegerConditionInspector b = getCondition( 0, "!=" );

        assertTrue( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    // --- "==" against other operators ---

    @Test
    public void testSubsumeEquals001() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 10, "==" );
        NumericIntegerConditionInspector b = getCondition( 0, ">" );

        assertFalse( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeEquals002() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "==" );
        NumericIntegerConditionInspector b = getCondition( 10, ">" );

        assertFalse( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeEquals003() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "==" );
        NumericIntegerConditionInspector b = getCondition( 10, "<" );

        assertFalse( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeEquals004() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 10, "==" );
        NumericIntegerConditionInspector b = getCondition( 0, "<" );

        assertFalse( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeEquals005() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 10, "==" );
        NumericIntegerConditionInspector b = getCondition( 0, "==" );

        assertFalse( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeEquals006() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "==" );
        NumericIntegerConditionInspector b = getCondition( 0, "==" );

        assertTrue( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeEquals007() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "==" );
        NumericIntegerConditionInspector b = getCondition( 10, "<" );

        assertFalse( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeEquals008() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "==" );
        NumericIntegerConditionInspector b = getCondition( 0, "<" );

        assertFalse( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    // --- ">" against ">" ---

    @Test
    public void testSubsumeGreaterThan001() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 10, ">" );
        NumericIntegerConditionInspector b = getCondition( 0, ">" );

        assertFalse( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeGreaterThan002() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, ">" );
        NumericIntegerConditionInspector b = getCondition( 0, ">" );

        assertTrue( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeGreaterThan003() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, ">" );
        NumericIntegerConditionInspector b = getCondition( 10, ">" );

        assertTrue( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    // --- ">" against ">=", "==", "<", "<=" ---

    @Test
    public void testSubsume004() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, ">" );
        NumericIntegerConditionInspector b = getCondition( 0, ">=" );

        assertFalse( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    @Test
    public void testSubsume005() throws Exception {
        // Over the integers "> 0" and ">= 1" denote the same set.
        NumericIntegerConditionInspector a = getCondition( 0, ">" );
        NumericIntegerConditionInspector b = getCondition( 1, ">=" );

        assertTrue( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    @Test
    public void testSubsume006() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 1, ">" );
        NumericIntegerConditionInspector b = getCondition( 0, ">=" );

        assertFalse( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    @Test
    public void testSubsume007AndLicenseToTest() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, ">" );
        NumericIntegerConditionInspector b = getCondition( -10, "==" );

        assertFalse( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    @Test
    public void testSubsume008() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, ">" );
        NumericIntegerConditionInspector b = getCondition( 0, "<" );

        assertFalse( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    @Test
    public void testSubsume009() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, ">" );
        NumericIntegerConditionInspector b = getCondition( 10, "<" );

        assertFalse( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    @Test
    public void testSubsume010() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, ">" );
        NumericIntegerConditionInspector b = getCondition( 0, "<=" );

        assertFalse( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    @Test
    public void testSubsume011() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, ">" );
        NumericIntegerConditionInspector b = getCondition( 10, "<=" );

        assertFalse( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    @Test
    public void testSubsume012() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, ">" );
        NumericIntegerConditionInspector b = getCondition( 10, "==" );

        assertTrue( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    // --- "<=" against other operators ---

    @Test
    public void testSubsumeEqualsOrLessThan001() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "<=" );
        NumericIntegerConditionInspector b = getCondition( 10, "==" );

        assertFalse( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeEqualsOrLessThan002() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 10, "<=" );
        NumericIntegerConditionInspector b = getCondition( 0, "==" );

        assertTrue( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeEqualsOrLessThan003() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 10, "<=" );
        NumericIntegerConditionInspector b = getCondition( 0, "<=" );

        assertTrue( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeEqualsOrLessThan004() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "<=" );
        NumericIntegerConditionInspector b = getCondition( 0, "<=" );

        assertTrue( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeEqualsOrLessThan005() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 10, "<=" );
        NumericIntegerConditionInspector b = getCondition( 0, ">" );

        assertFalse( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeEqualsOrLessThan006() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 10, "<=" );
        NumericIntegerConditionInspector b = getCondition( 0, "<" );

        assertTrue( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeEqualsOrLessThan007() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "<=" );
        NumericIntegerConditionInspector b = getCondition( 0, "<" );

        assertTrue( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeEqualsOrLessThan008() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "<=" );
        NumericIntegerConditionInspector b = getCondition( 10, "<" );

        assertFalse( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeEqualsOrLessThan009() throws Exception {
        // Over the integers "<= 0" and "< 1" denote the same set.
        NumericIntegerConditionInspector a = getCondition( 0, "<=" );
        NumericIntegerConditionInspector b = getCondition( 1, "<" );

        assertTrue( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeEqualsOrLessThan010() throws Exception {
        // NOTE(review): identical inputs and expectations to
        // testSubsumeEqualsOrLessThan009 — possibly a copy/paste duplicate.
        NumericIntegerConditionInspector a = getCondition( 0, "<=" );
        NumericIntegerConditionInspector b = getCondition( 1, "<" );

        assertTrue( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeEqualsOrLessThan011() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "<=" );
        NumericIntegerConditionInspector b = getCondition( 10, "!=" );

        assertTrue( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeEqualsOrLessThan012() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 10, "<=" );
        NumericIntegerConditionInspector b = getCondition( 0, "!=" );

        assertFalse( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeEqualsOrLessThan013() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "<=" );
        NumericIntegerConditionInspector b = getCondition( 0, "!=" );

        assertFalse( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    // --- "!=" against other operators ---

    @Test
    public void testSubsumeNotEqual001() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "!=" );
        NumericIntegerConditionInspector b = getCondition( 0, "!=" );

        assertTrue( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeNotEqual002() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "!=" );
        NumericIntegerConditionInspector b = getCondition( 1, "!=" );

        assertFalse( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeNotEqual003() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "!=" );
        NumericIntegerConditionInspector b = getCondition( 0, "==" );

        assertFalse( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeNotEqual004() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "!=" );
        NumericIntegerConditionInspector b = getCondition( 10, "==" );

        assertTrue( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeNotEqual005() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "!=" );
        NumericIntegerConditionInspector b = getCondition( 0, ">" );

        assertTrue( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeNotEqual006() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 10, "!=" );
        NumericIntegerConditionInspector b = getCondition( 0, ">" );

        assertFalse( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeNotEqual007AndYouOnlyTestTwice() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 10, "!=" );
        NumericIntegerConditionInspector b = getCondition( 0, ">=" );

        assertFalse( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeNotEqual008() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "!=" );
        NumericIntegerConditionInspector b = getCondition( 0, "<=" );

        assertFalse( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeNotEqual009() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "!=" );
        NumericIntegerConditionInspector b = getCondition( 10, ">=" );

        assertTrue( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeNotEqual010() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "!=" );
        NumericIntegerConditionInspector b = getCondition( 10, "<" );

        assertFalse( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeNotEqual011() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "!=" );
        NumericIntegerConditionInspector b = getCondition( 0, "<" );

        assertTrue( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeNotEqual012() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "!=" );
        NumericIntegerConditionInspector b = getCondition( 10, ">" );

        assertTrue( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    // --- "<" against "<" ---

    @Test
    public void testSubsumeLessThan001() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "<" );
        NumericIntegerConditionInspector b = getCondition( 0, "<" );

        assertTrue( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeLessThan002() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 10, "<" );
        NumericIntegerConditionInspector b = getCondition( 0, "<" );

        assertTrue( a.subsumes( b ) );
        assertFalse( b.subsumes( a ) );
    }

    @Test
    public void testSubsumeLessThan003() throws Exception {
        NumericIntegerConditionInspector a = getCondition( 0, "<" );
        NumericIntegerConditionInspector b = getCondition( 10, "<" );

        assertFalse( a.subsumes( b ) );
        assertTrue( b.subsumes( a ) );
    }

    /**
     * Builds an inspector for the condition {@code age <operator> <value>}
     * against a mocked Pattern52.
     */
    private NumericIntegerConditionInspector getCondition( int value,
                                                           String operator ) {
        return new NumericIntegerConditionInspector( mock( Pattern52.class ), "age", value, operator );
    }
}
sankarh/hive | jdbc/src/java/org/apache/hive/jdbc/saml/IJdbcBrowserClientFactory.java | 1551 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hive.jdbc.saml;
import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
import org.apache.hive.jdbc.saml.IJdbcBrowserClient.HiveJdbcBrowserException;
/**
 * Factory class to instantiate the {@link IJdbcBrowserClient}. This is mostly used for
 * testing purposes so that test can instantiate a test browser client which can do
 * browser interaction programmatically.
 */
public interface IJdbcBrowserClientFactory {

  /**
   * Create a {@link IJdbcBrowserClient} from the given {@link JdbcConnectionParams}.
   *
   * @param connectionParams the JDBC connection parameters used to configure the client
   * @return a new browser client instance
   * @throws HiveJdbcBrowserException In case of any error to instantiate the browser
   * client.
   */
  IJdbcBrowserClient create(JdbcConnectionParams connectionParams)
      throws HiveJdbcBrowserException;
}
| apache-2.0 |
sdole/aws-sdk-java | aws-java-sdk-ses/src/main/java/com/amazonaws/services/simpleemail/model/transform/DescribeReceiptRuleSetRequestMarshaller.java | 2027 | /*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleemail.model.transform;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.amazonaws.AmazonClientException;
import com.amazonaws.Request;
import com.amazonaws.DefaultRequest;
import com.amazonaws.internal.ListWithAutoConstructFlag;
import com.amazonaws.services.simpleemail.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.util.StringUtils;
/**
* Describe Receipt Rule Set Request Marshaller
*/
/**
 * Marshalls a {@link DescribeReceiptRuleSetRequest} into a signable {@link Request}
 * targeting the Amazon Simple Email Service query API (version 2010-12-01).
 */
public class DescribeReceiptRuleSetRequestMarshaller implements Marshaller<Request<DescribeReceiptRuleSetRequest>, DescribeReceiptRuleSetRequest> {

    public Request<DescribeReceiptRuleSetRequest> marshall(DescribeReceiptRuleSetRequest describeReceiptRuleSetRequest) {
        // A null request cannot be marshalled.
        if (describeReceiptRuleSetRequest == null) {
            throw new AmazonClientException("Invalid argument passed to marshall(...)");
        }

        Request<DescribeReceiptRuleSetRequest> marshalledRequest =
                new DefaultRequest<DescribeReceiptRuleSetRequest>(describeReceiptRuleSetRequest, "AmazonSimpleEmailService");
        marshalledRequest.addParameter("Action", "DescribeReceiptRuleSet");
        marshalledRequest.addParameter("Version", "2010-12-01");

        // The rule set name is optional; the parameter is omitted entirely when unset.
        String ruleSetName = describeReceiptRuleSetRequest.getRuleSetName();
        if (ruleSetName != null) {
            marshalledRequest.addParameter("RuleSetName", StringUtils.fromString(ruleSetName));
        }

        return marshalledRequest;
    }
}
| apache-2.0 |
ravisund/Kundera | src/kundera-hbase/kundera-hbase/src/test/java/com/impetus/client/hbase/schemaManager/HBaseEntityAddressUni1ToM.java | 2022 | /*******************************************************************************
* * Copyright 2012 Impetus Infotech.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
******************************************************************************/
package com.impetus.client.hbase.schemaManager;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
/**
 * Address entity used by the HBase schema-manager tests; persists an
 * identifier and a street column.
 */
@Entity
@Table(name = "HbaseEntityAddressUni1ToM", schema = "KunderaHbaseExamples@hbase")
public class HBaseEntityAddressUni1ToM
{
    /** Primary key of this address row. */
    @Id
    @Column(name = "ADDRESS_ID")
    private String addressId;

    /** Street component of the address. */
    @Column(name = "STREET")
    private String street;

    /** @return the primary key of this address */
    public String getAddressId()
    {
        return addressId;
    }

    /** @param addressId the primary key to assign to this address */
    public void setAddressId(String addressId)
    {
        this.addressId = addressId;
    }

    /** @return the street component of the address */
    public String getStreet()
    {
        return street;
    }

    /** @param street the street component to assign */
    public void setStreet(String street)
    {
        this.street = street;
    }
}
| apache-2.0 |
yangweigbh/libgdx | extensions/gdx-bullet/jni/swig-src/collision/com/badlogic/gdx/physics/bullet/collision/btManifoldPoint.java | 9556 | /* ----------------------------------------------------------------------------
* This file was automatically generated by SWIG (http://www.swig.org).
* Version 3.0.10
*
* Do not make changes to this file unless you know what you are doing--modify
* the SWIG interface file instead.
* ----------------------------------------------------------------------------- */
package com.badlogic.gdx.physics.bullet.collision;
import com.badlogic.gdx.physics.bullet.BulletBase;
import com.badlogic.gdx.physics.bullet.linearmath.*;
import com.badlogic.gdx.math.Vector3;
import com.badlogic.gdx.math.Quaternion;
import com.badlogic.gdx.math.Matrix3;
import com.badlogic.gdx.math.Matrix4;
/**
 * Java-side wrapper for Bullet's native {@code btManifoldPoint}: a single
 * contact point inside a persistent contact manifold between two colliding
 * objects. Generated by SWIG — every accessor below delegates to native code
 * through {@link CollisionJNI}; do not hand-edit the delegation logic.
 */
public class btManifoldPoint extends BulletBase {
    // Address of the wrapped native btManifoldPoint instance (0 when unset).
    private long swigCPtr;

    protected btManifoldPoint(final String className, long cPtr, boolean cMemoryOwn) {
        super(className, cPtr, cMemoryOwn);
        swigCPtr = cPtr;
    }

    /** Construct a new btManifoldPoint, normally you should not need this constructor it's intended for low-level usage. */
    public btManifoldPoint(long cPtr, boolean cMemoryOwn) {
        this("btManifoldPoint", cPtr, cMemoryOwn);
        construct();
    }

    // Re-point this wrapper at a different native instance, destroying the
    // previously wrapped one first if it has not been destroyed yet.
    @Override
    protected void reset(long cPtr, boolean cMemoryOwn) {
        if (!destroyed)
            destroy();
        super.reset(swigCPtr = cPtr, cMemoryOwn);
    }

    // Null-safe access to another wrapper's native pointer.
    public static long getCPtr(btManifoldPoint obj) {
        return (obj == null) ? 0 : obj.swigCPtr;
    }

    @Override
    protected void finalize() throws Throwable {
        if (!destroyed)
            destroy();
        super.finalize();
    }

    // Frees the native instance if this wrapper owns it, then clears the pointer.
    @Override protected synchronized void delete() {
        if (swigCPtr != 0) {
            if (swigCMemOwn) {
                swigCMemOwn = false;
                CollisionJNI.delete_btManifoldPoint(swigCPtr);
            }
            swigCPtr = 0;
        }
        super.delete();
    }

    // Shared scratch wrapper reused by native methods that return a
    // btManifoldPoint, avoiding an allocation per call. NOTE(review): a single
    // shared instance — presumably not safe for concurrent use; confirm callers
    // stay on one thread.
    private final static btManifoldPoint temp = new btManifoldPoint(0, false);
    /** Obtains a temporary instance, used by native methods that return a btManifoldPoint instance */
    protected static btManifoldPoint obtainTemp(long cPtr, boolean own) {
        temp.reset(cPtr, own);
        return temp;
    }

    // Ring buffer of four reusable wrappers for callback arguments; the
    // "& 3" in obtainForArgument wraps argumentIndex around the 4 entries.
    private static btManifoldPoint[] argumentInstances = new btManifoldPoint[] {new btManifoldPoint(0, false),
        new btManifoldPoint(0, false), new btManifoldPoint(0, false), new btManifoldPoint(0, false)};
    private static int argumentIndex = -1;
    /** Obtains a temporary instance, used for callback methods with one or more btManifoldPoint arguments */
    protected static btManifoldPoint obtainForArgument(final long swigCPtr, boolean owner) {
        btManifoldPoint instance = argumentInstances[argumentIndex = (argumentIndex + 1) & 3];
        instance.reset(swigCPtr, owner);
        return instance;
    }

    // Allocates a fresh native btManifoldPoint owned by this wrapper.
    public btManifoldPoint() {
        this(CollisionJNI.new_btManifoldPoint__SWIG_0(), true);
    }

    // Allocates a native btManifoldPoint initialized with the given contact data.
    public btManifoldPoint(Vector3 pointA, Vector3 pointB, Vector3 normal, float distance) {
        this(CollisionJNI.new_btManifoldPoint__SWIG_1(pointA, pointB, normal, distance), true);
    }

    // ---------------------------------------------------------------------
    // Property accessors. Each getter/setter reads or writes the matching
    // field of the native struct via CollisionJNI. Vector-valued getters fill
    // a caller-supplied Vector3 "out" parameter instead of allocating.
    // ---------------------------------------------------------------------

    public void setDistance1(float value) {
        CollisionJNI.btManifoldPoint_distance1_set(swigCPtr, this, value);
    }

    public float getDistance1() {
        return CollisionJNI.btManifoldPoint_distance1_get(swigCPtr, this);
    }

    public void setCombinedFriction(float value) {
        CollisionJNI.btManifoldPoint_combinedFriction_set(swigCPtr, this, value);
    }

    public float getCombinedFriction() {
        return CollisionJNI.btManifoldPoint_combinedFriction_get(swigCPtr, this);
    }

    public void setCombinedRollingFriction(float value) {
        CollisionJNI.btManifoldPoint_combinedRollingFriction_set(swigCPtr, this, value);
    }

    public float getCombinedRollingFriction() {
        return CollisionJNI.btManifoldPoint_combinedRollingFriction_get(swigCPtr, this);
    }

    public void setCombinedRestitution(float value) {
        CollisionJNI.btManifoldPoint_combinedRestitution_set(swigCPtr, this, value);
    }

    public float getCombinedRestitution() {
        return CollisionJNI.btManifoldPoint_combinedRestitution_get(swigCPtr, this);
    }

    public void setPartId0(int value) {
        CollisionJNI.btManifoldPoint_partId0_set(swigCPtr, this, value);
    }

    public int getPartId0() {
        return CollisionJNI.btManifoldPoint_partId0_get(swigCPtr, this);
    }

    public void setPartId1(int value) {
        CollisionJNI.btManifoldPoint_partId1_set(swigCPtr, this, value);
    }

    public int getPartId1() {
        return CollisionJNI.btManifoldPoint_partId1_get(swigCPtr, this);
    }

    public void setIndex0(int value) {
        CollisionJNI.btManifoldPoint_index0_set(swigCPtr, this, value);
    }

    public int getIndex0() {
        return CollisionJNI.btManifoldPoint_index0_get(swigCPtr, this);
    }

    public void setIndex1(int value) {
        CollisionJNI.btManifoldPoint_index1_set(swigCPtr, this, value);
    }

    public int getIndex1() {
        return CollisionJNI.btManifoldPoint_index1_get(swigCPtr, this);
    }

    public void setUserPersistentData(long value) {
        CollisionJNI.btManifoldPoint_userPersistentData_set(swigCPtr, this, value);
    }

    public long getUserPersistentData() {
        return CollisionJNI.btManifoldPoint_userPersistentData_get(swigCPtr, this);
    }

    public void setLateralFrictionInitialized(boolean value) {
        CollisionJNI.btManifoldPoint_lateralFrictionInitialized_set(swigCPtr, this, value);
    }

    public boolean getLateralFrictionInitialized() {
        return CollisionJNI.btManifoldPoint_lateralFrictionInitialized_get(swigCPtr, this);
    }

    public void setAppliedImpulse(float value) {
        CollisionJNI.btManifoldPoint_appliedImpulse_set(swigCPtr, this, value);
    }

    public float getAppliedImpulse() {
        return CollisionJNI.btManifoldPoint_appliedImpulse_get(swigCPtr, this);
    }

    public void setAppliedImpulseLateral1(float value) {
        CollisionJNI.btManifoldPoint_appliedImpulseLateral1_set(swigCPtr, this, value);
    }

    public float getAppliedImpulseLateral1() {
        return CollisionJNI.btManifoldPoint_appliedImpulseLateral1_get(swigCPtr, this);
    }

    public void setAppliedImpulseLateral2(float value) {
        CollisionJNI.btManifoldPoint_appliedImpulseLateral2_set(swigCPtr, this, value);
    }

    public float getAppliedImpulseLateral2() {
        return CollisionJNI.btManifoldPoint_appliedImpulseLateral2_get(swigCPtr, this);
    }

    public void setContactMotion1(float value) {
        CollisionJNI.btManifoldPoint_contactMotion1_set(swigCPtr, this, value);
    }

    public float getContactMotion1() {
        return CollisionJNI.btManifoldPoint_contactMotion1_get(swigCPtr, this);
    }

    public void setContactMotion2(float value) {
        CollisionJNI.btManifoldPoint_contactMotion2_set(swigCPtr, this, value);
    }

    public float getContactMotion2() {
        return CollisionJNI.btManifoldPoint_contactMotion2_get(swigCPtr, this);
    }

    public void setContactCFM1(float value) {
        CollisionJNI.btManifoldPoint_contactCFM1_set(swigCPtr, this, value);
    }

    public float getContactCFM1() {
        return CollisionJNI.btManifoldPoint_contactCFM1_get(swigCPtr, this);
    }

    public void setContactCFM2(float value) {
        CollisionJNI.btManifoldPoint_contactCFM2_set(swigCPtr, this, value);
    }

    public float getContactCFM2() {
        return CollisionJNI.btManifoldPoint_contactCFM2_get(swigCPtr, this);
    }

    public void setLifeTime(int value) {
        CollisionJNI.btManifoldPoint_lifeTime_set(swigCPtr, this, value);
    }

    public int getLifeTime() {
        return CollisionJNI.btManifoldPoint_lifeTime_get(swigCPtr, this);
    }

    public float getDistance() {
        return CollisionJNI.btManifoldPoint_getDistance(swigCPtr, this);
    }

    public void setDistance(float dist) {
        CollisionJNI.btManifoldPoint_setDistance(swigCPtr, this, dist);
    }

    public int getUserValue() {
        return CollisionJNI.btManifoldPoint_getUserValue(swigCPtr, this);
    }

    public void setUserValue(int value) {
        CollisionJNI.btManifoldPoint_setUserValue(swigCPtr, this, value);
    }

    public void getLocalPointA(Vector3 out) {
        CollisionJNI.btManifoldPoint_getLocalPointA(swigCPtr, this, out);
    }

    public void setLocalPointA(Vector3 value) {
        CollisionJNI.btManifoldPoint_setLocalPointA(swigCPtr, this, value);
    }

    public void getLocalPointB(Vector3 out) {
        CollisionJNI.btManifoldPoint_getLocalPointB(swigCPtr, this, out);
    }

    public void setLocalPointB(Vector3 value) {
        CollisionJNI.btManifoldPoint_setLocalPointB(swigCPtr, this, value);
    }

    public void getPositionWorldOnA(Vector3 out) {
        CollisionJNI.btManifoldPoint_getPositionWorldOnA(swigCPtr, this, out);
    }

    public void setPositionWorldOnA(Vector3 value) {
        CollisionJNI.btManifoldPoint_setPositionWorldOnA(swigCPtr, this, value);
    }

    public void getPositionWorldOnB(Vector3 out) {
        CollisionJNI.btManifoldPoint_getPositionWorldOnB(swigCPtr, this, out);
    }

    public void setPositionWorldOnB(Vector3 value) {
        CollisionJNI.btManifoldPoint_setPositionWorldOnB(swigCPtr, this, value);
    }

    public void getNormalWorldOnB(Vector3 out) {
        CollisionJNI.btManifoldPoint_getNormalWorldOnB(swigCPtr, this, out);
    }

    public void setNormalWorldOnB(Vector3 value) {
        CollisionJNI.btManifoldPoint_setNormalWorldOnB(swigCPtr, this, value);
    }

    public void getLateralFrictionDir1(Vector3 out) {
        CollisionJNI.btManifoldPoint_getLateralFrictionDir1(swigCPtr, this, out);
    }

    public void setLateralFrictionDir1(Vector3 value) {
        CollisionJNI.btManifoldPoint_setLateralFrictionDir1(swigCPtr, this, value);
    }

    public void getLateralFrictionDir2(Vector3 out) {
        CollisionJNI.btManifoldPoint_getLateralFrictionDir2(swigCPtr, this, out);
    }

    public void setLateralFrictionDir2(Vector3 value) {
        CollisionJNI.btManifoldPoint_setLateralFrictionDir2(swigCPtr, this, value);
    }
}
| apache-2.0 |
jludvice/fabric8 | common-util/src/main/java/io/fabric8/common/util/Base64Encoder.java | 7210 | /**
* Copyright 2005-2016 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.common.util;
import java.nio.charset.Charset;
/**
 * Base64 encoding utilities.
 * <p/>
 * Uses the standard RFC 2045/4648 alphabet with '=' padding. The decoder is
 * lenient: any byte that is not part of the alphabet (line breaks, whitespace,
 * '=' padding) is skipped before decoding.
 */
public class Base64Encoder {

    /** Charset used to map between Strings and base64 byte streams; base64 text is pure ASCII. */
    public static final Charset base64CharSet = Charset.forName("ISO-8859-1");

    /**
     * Utility class, meant to be used via static methods only.
     */
    private Base64Encoder() {
        // utility class
    }

    /**
     * Encodes a String into a base 64 String (single line, no chunking).
     * <p/>
     * NOTE(review): uses the platform default charset for the String/byte
     * conversion, matching historical behavior — confirm callers only pass
     * ASCII-safe content.
     *
     * @param s String to encode.
     * @return encoded string.
     */
    public static String encode(String s) {
        byte[] sBytes = s.getBytes();
        sBytes = encode(sBytes);
        s = new String(sBytes);
        return s;
    }

    /**
     * Decodes a base 64 String into a String.
     * <p/>
     *
     * @param s String to decode.
     * @return decoded string.
     * @throws java.lang.IllegalArgumentException
     *         thrown if the given String was not valid Base64 encoding.
     */
    public static String decode(String s)
        throws IllegalArgumentException {
        s = s.replaceAll("\n", "");
        s = s.replaceAll("\r", "");
        byte[] sBytes = s.getBytes(base64CharSet);
        sBytes = decode(sBytes);
        s = new String(sBytes, base64CharSet);
        return s;
    }

    // Encoding alphabet: 64 data characters plus '=' (index 64) for padding.
    private static final byte[] ALPHASET =
        "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=".getBytes();

    // Bit masks used to slice three 8-bit bytes into four 6-bit groups:
    // IxOy = "inner x bits, outer y bits" of the byte being split.
    private static final int I6O2 = 255 - 3;
    private static final int O6I2 = 3;
    private static final int I4O4 = 255 - 15;
    private static final int O4I4 = 15;
    private static final int I2O6 = 255 - 63;
    private static final int O2I6 = 63;

    /**
     * Encodes a byte array into a base 64 byte array.
     * <p/>
     *
     * @param dData byte array to encode.
     * @return encoded byte array.
     */
    public static byte[] encode(byte[] dData) {
        if (dData == null) {
            throw new IllegalArgumentException("Cannot encode null");
        }
        // Four output characters per three input bytes, rounded up.
        byte[] eData = new byte[((dData.length + 2) / 3) * 4];
        int eIndex = 0;
        for (int i = 0; i < dData.length; i += 3) {
            int d1;
            int d2 = 0;
            int d3 = 0;
            int e1;
            int e2;
            int e3;
            int e4;
            int pad = 0;
            d1 = dData[i];
            if ((i + 1) < dData.length) {
                d2 = dData[i + 1];
                if ((i + 2) < dData.length) {
                    d3 = dData[i + 2];
                } else {
                    pad = 1;
                }
            } else {
                pad = 2;
            }
            e1 = ALPHASET[(d1 & I6O2) >> 2];
            e2 = ALPHASET[(d1 & O6I2) << 4 | (d2 & I4O4) >> 4];
            e3 = ALPHASET[(d2 & O4I4) << 2 | (d3 & I2O6) >> 6];
            e4 = ALPHASET[(d3 & O2I6)];
            eData[eIndex++] = (byte) e1;
            eData[eIndex++] = (byte) e2;
            // Replace the final one or two characters with '=' padding.
            eData[eIndex++] = (pad < 2) ? (byte) e3 : (byte) '=';
            eData[eIndex++] = (pad < 1) ? (byte) e4 : (byte) '=';
        }
        return eData;
    }

    // Reverse lookup: CODES[c] is the 6-bit value of character c, or 64 ('='
    // and every non-alphabet character) meaning "not a data character".
    private final static int[] CODES = new int[256];

    static {
        for (int i = 0; i < CODES.length; i++) {
            CODES[i] = 64;
        }
        for (int i = 0; i < ALPHASET.length; i++) {
            CODES[ALPHASET[i]] = i;
        }
    }

    /**
     * Decodes a Base64 byte array.
     * <p/>
     * Bytes outside the alphabet (whitespace, line breaks, '=' padding) are
     * skipped. Fixes over the original implementation: the decode loop now
     * iterates over the compacted data (trailing whitespace after the padding
     * no longer triggers a spurious failure), negative bytes no longer cause
     * an ArrayIndexOutOfBoundsException, and a dangling single character is
     * rejected explicitly.
     *
     * @param eData byte array to decode.
     * @return decoded byte array.
     * @throws java.lang.IllegalArgumentException
     *         thrown if the given byte array was not valid com.sun.syndication.io.impl.Base64 encoding.
     */
    public static byte[] decode(byte[] eData)
        throws IllegalArgumentException {
        if (eData == null) {
            throw new IllegalArgumentException("Cannot decode null");
        }
        // Compact the alphabet characters to the front of a working copy.
        // (A signed byte is always < 256; the ">= 0" guard prevents negative
        // bytes from indexing CODES out of bounds.)
        byte[] cleanEData = eData.clone();
        int cleanELength = 0;
        for (byte anEData : eData) {
            if (anEData >= 0 && CODES[anEData] < 64) {
                cleanEData[cleanELength++] = anEData;
            }
        }
        // Three decoded bytes per four characters; a trailing group of 3 or 2
        // characters contributes 2 or 1 bytes. A trailing group of 1 is invalid.
        int dLength = (cleanELength / 4) * 3;
        switch (cleanELength % 4) {
            case 3:
                dLength += 2;
                break;
            case 2:
                dLength++;
                break;
            case 1:
                throw new IllegalArgumentException(
                    "byte array is not a valid com.sun.syndication.io.impl.Base64 encoding"
                );
        }
        byte[] dData = new byte[dLength];
        int dIndex = 0;
        for (int i = 0; i < cleanELength; i += 4) {
            int e1 = CODES[cleanEData[i]];
            int e2 = (i + 1 < cleanELength) ? CODES[cleanEData[i + 1]] : 0;
            int e3 = (i + 2 < cleanELength) ? CODES[cleanEData[i + 2]] : 0;
            int e4 = (i + 3 < cleanELength) ? CODES[cleanEData[i + 3]] : 0;
            dData[dIndex++] = (byte) ((e1 << 2) | (e2 >> 4));
            if (dIndex < dData.length) {
                dData[dIndex++] = (byte) ((e2 << 4) | (e3 >> 2));
            }
            if (dIndex < dData.length) {
                dData[dIndex++] = (byte) ((e3 << 6) | (e4));
            }
        }
        return dData;
    }

    public static void main(String[] args)
        throws Exception {
        String s =
            "\nPGRpdiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMTk5OS94aHRtbCI+V2UncmUgcHJvcG9zaW5nIDxhIGhy\n" +
            "ZWY9Imh0dHA6Ly93d3cuZ29vZ2xlLmNvbS9jb3Jwb3JhdGUvc29mdHdhcmVfcHJpbmNpcGxlcy5odG1sIj5z\n" +
            "b21lIGd1aWRlbGluZXMgPC9hPnRvIGhlbHAgY3VyYiB0aGUgcHJvYmxlbSBvZiBJbnRlcm5ldCBzb2Z0d2Fy\n" +
            "ZSB0aGF0IGluc3RhbGxzIGl0c2VsZiB3aXRob3V0IHRlbGxpbmcgeW91LCBvciBiZWhhdmVzIGJhZGx5IG9u\n" +
            "Y2UgaXQgZ2V0cyBvbiB5b3VyIGNvbXB1dGVyLiBXZSd2ZSBiZWVuIGhlYXJpbmcgYSBsb3Qgb2YgY29tcGxh\n" +
            "aW50cyBhYm91dCB0aGlzIGxhdGVseSBhbmQgaXQgc2VlbXMgdG8gYmUgZ2V0dGluZyB3b3JzZS4gV2UgdGhp\n" +
            "bmsgaXQncyBpbXBvcnRhbnQgdGhhdCB5b3UgcmV0YWluIGNvbnRyb2wgb2YgeW91ciBjb21wdXRlciBhbmQg\n" +
            "dGhhdCB0aGVyZSBiZSBzb21lIGNsZWFyIHN0YW5kYXJkcyBpbiBvdXIgaW5kdXN0cnkuIExldCB1cyBrbm93\n" +
            "IGlmIHlvdSB0aGluayB0aGVzZSBndWlkZWxpbmVzIGFyZSB1c2VmdWwgb3IgaWYgeW91IGhhdmUgc3VnZ2Vz\n" +
            "dGlvbnMgdG8gaW1wcm92ZSB0aGVtLgo8YnIgLz4KPGJyIC8+Sm9uYXRoYW4gUm9zZW5iZXJnCjxiciAvPgo8\n" +
            "L2Rpdj4K\n";
        System.out.println(decode(s));
    }
}
| apache-2.0 |
oplinkoms/onos | apps/cfm/api/src/main/java/org/onosproject/incubator/net/l2monitoring/cfm/service/CfmMepService.java | 2216 | /*
* Copyright 2017-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.incubator.net.l2monitoring.cfm.service;
import java.util.Collection;
import org.onosproject.event.ListenerService;
import org.onosproject.incubator.net.l2monitoring.cfm.Mep;
import org.onosproject.incubator.net.l2monitoring.cfm.MepEntry;
import org.onosproject.incubator.net.l2monitoring.cfm.identifier.MaIdShort;
import org.onosproject.incubator.net.l2monitoring.cfm.identifier.MdId;
import org.onosproject.net.DeviceId;
/**
 * For the management of Maintenance Association Endpoints (MEPs).
 *
 * These are dependent on the Maintenance Domain service which maintains the
 * Maintenance Domain and Maintenance Associations.
 */
public interface CfmMepService
extends ListenerService<CfmMepEvent, CfmMepListener>, CfmMepServiceBase {
/**
 * Retrieve all {@link MepEntry}(s) belonging to an MA.
 * Each entry carries both configuration and runtime state.
 * @param mdName A Maintenance Domain
 * @param maName A Maintenance Association in the MD
 * @return A collection of MEP Entries
 * @throws CfmConfigException If there is a problem with the MD or MA
 */
Collection<MepEntry> getAllMeps(MdId mdName, MaIdShort maName)
throws CfmConfigException;
/**
 * Retrieve all {@link Mep}(s) configured on a device.
 * Note: This just returns the configuration part of the Mep, not the MepEntry
 * which contains config and state.
 * @param deviceId A device id
 * @return A collection of MEPs configured on the device
 * @throws CfmConfigException If there is a problem retrieving the MEPs
 */
Collection<Mep> getAllMepsByDevice(DeviceId deviceId)
throws CfmConfigException;
}
| apache-2.0 |
asedunov/intellij-community | java/java-tests/testSrc/com/intellij/java/codeInsight/daemon/quickFix/AddVariableInitializerTest.java | 1024 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.java.codeInsight.daemon.quickFix;
import com.intellij.codeInsight.daemon.quickFix.LightQuickFixParameterizedTestCase;
public class AddVariableInitializerTest extends LightQuickFixParameterizedTestCase {

    /** Runs every data-driven quick-fix case found under {@link #getBasePath()}. */
    public void test() throws Exception {
        doAllTests();
    }

    /** Location of the test data, relative to the quick-fix test-data root. */
    @Override
    protected String getBasePath() {
        return "/codeInsight/daemonCodeAnalyzer/quickFix/addVariableInitializer";
    }
}
| apache-2.0 |
ern/elasticsearch | build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java | 6961 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.gradle.internal;
import org.apache.commons.io.FileUtils;
import org.apache.tools.ant.taskdefs.condition.Os;
import org.elasticsearch.gradle.LoggedExec;
import org.gradle.api.Action;
import org.elasticsearch.gradle.Version;
import org.gradle.api.GradleException;
import org.gradle.api.Project;
import org.gradle.api.Task;
import org.gradle.api.logging.LogLevel;
import org.gradle.api.provider.Provider;
import org.gradle.api.tasks.TaskProvider;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import static org.elasticsearch.gradle.internal.util.JavaUtil.getJavaHome;
/**
 * By registering bwc tasks via this extension we can support declaring custom bwc tasks from the build script
 * without relying on groovy closures and sharing common logic for tasks created by the BwcSetup plugin already.
 */
public class BwcSetupExtension {

    private static final String MINIMUM_COMPILER_VERSION_PATH = "src/main/resources/minimumCompilerVersion";
    // First version whose checkout keeps build logic under build-tools-internal/ instead of buildSrc/.
    private static final Version BUILD_TOOL_MINIMUM_VERSION = Version.fromString("7.14.0");

    private final Project project;
    private final Provider<BwcVersions.UnreleasedVersionInfo> unreleasedVersionInfo;
    private final Provider<InternalDistributionBwcSetupPlugin.BwcTaskThrottle> bwcTaskThrottleProvider;
    private Provider<File> checkoutDir;

    public BwcSetupExtension(
        Project project,
        Provider<BwcVersions.UnreleasedVersionInfo> unreleasedVersionInfo,
        Provider<InternalDistributionBwcSetupPlugin.BwcTaskThrottle> bwcTaskThrottleProvider,
        Provider<File> checkoutDir
    ) {
        this.project = project;
        this.unreleasedVersionInfo = unreleasedVersionInfo;
        this.bwcTaskThrottleProvider = bwcTaskThrottleProvider;
        this.checkoutDir = checkoutDir;
    }

    /**
     * Registers a task that runs a nested Gradle invocation inside the bwc checkout.
     *
     * @param name          name of the task to register
     * @param configuration extra configuration applied on top of the common bwc setup
     * @return the registered task provider
     */
    TaskProvider<LoggedExec> bwcTask(String name, Action<LoggedExec> configuration) {
        return createRunBwcGradleTask(project, name, configuration);
    }

    // Common wiring for every bwc task: checkout dependency, throttling, JAVA_HOME
    // selection, gradlew invocation, and forwarding of the outer build's flags.
    private TaskProvider<LoggedExec> createRunBwcGradleTask(Project project, String name, Action<LoggedExec> configAction) {
        return project.getTasks().register(name, LoggedExec.class, loggedExec -> {
            loggedExec.dependsOn("checkoutBwcBranch");
            loggedExec.usesService(bwcTaskThrottleProvider);
            loggedExec.setSpoolOutput(true);
            loggedExec.setWorkingDir(checkoutDir.get());
            loggedExec.doFirst(new Action<Task>() {
                @Override
                public void execute(Task t) {
                    // Execution time so that the checkouts are available: read the
                    // checked-out branch's minimum compiler version and point
                    // JAVA_HOME at a matching JDK.
                    String compilerVersionInfoPath = minimumCompilerVersionPath(unreleasedVersionInfo.get().version);
                    String minimumCompilerVersion = readFromFile(new File(checkoutDir.get(), compilerVersionInfoPath));
                    loggedExec.environment("JAVA_HOME", getJavaHome(Integer.parseInt(minimumCompilerVersion)));
                }
            });
            if (Os.isFamily(Os.FAMILY_WINDOWS)) {
                loggedExec.executable("cmd");
                loggedExec.args("/C", "call", new File(checkoutDir.get(), "gradlew").toString());
            } else {
                loggedExec.executable(new File(checkoutDir.get(), "gradlew").toString());
            }
            if (project.getGradle().getStartParameter().isOffline()) {
                loggedExec.args("--offline");
            }
            // TODO resolve
            String buildCacheUrl = System.getProperty("org.elasticsearch.build.cache.url");
            if (buildCacheUrl != null) {
                loggedExec.args("-Dorg.elasticsearch.build.cache.url=" + buildCacheUrl);
            }
            loggedExec.args("-Dbuild.snapshot=true");
            loggedExec.args("-Dscan.tag.NESTED");
            // Forward the outer invocation's log level to the nested build.
            final LogLevel logLevel = project.getGradle().getStartParameter().getLogLevel();
            List<LogLevel> nonDefaultLogLevels = Arrays.asList(LogLevel.QUIET, LogLevel.WARN, LogLevel.INFO, LogLevel.DEBUG);
            if (nonDefaultLogLevels.contains(logLevel)) {
                loggedExec.args("--" + logLevel.name().toLowerCase(Locale.ENGLISH));
            }
            // Forward the stacktrace verbosity as well.
            final String showStacktraceName = project.getGradle().getStartParameter().getShowStacktrace().name();
            assert Arrays.asList("INTERNAL_EXCEPTIONS", "ALWAYS", "ALWAYS_FULL").contains(showStacktraceName);
            if (showStacktraceName.equals("ALWAYS")) {
                loggedExec.args("--stacktrace");
            } else if (showStacktraceName.equals("ALWAYS_FULL")) {
                loggedExec.args("--full-stacktrace");
            }
            if (project.getGradle().getStartParameter().isParallelProjectExecutionEnabled()) {
                loggedExec.args("--parallel");
            }
            // Prefix the nested build's output with its version so interleaved
            // logs from several bwc builds remain attributable.
            loggedExec.setStandardOutput(new IndentingOutputStream(System.out, unreleasedVersionInfo.get().version));
            loggedExec.setErrorOutput(new IndentingOutputStream(System.err, unreleasedVersionInfo.get().version));
            configAction.execute(loggedExec);
        });
    }

    // Path of the minimumCompilerVersion resource inside the checkout; its parent
    // directory moved from buildSrc/ to build-tools-internal/ in 7.14.0.
    private String minimumCompilerVersionPath(Version bwcVersion) {
        return (bwcVersion.onOrAfter(BUILD_TOOL_MINIMUM_VERSION)) ?
            "build-tools-internal/" + MINIMUM_COMPILER_VERSION_PATH :
            "buildSrc/" + MINIMUM_COMPILER_VERSION_PATH;
    }

    /**
     * OutputStream decorator that writes a " [version] " prefix after every
     * newline so nested-build output is attributable to its bwc version.
     */
    private static class IndentingOutputStream extends OutputStream {
        public final byte[] indent;
        private final OutputStream delegate;

        IndentingOutputStream(OutputStream delegate, Object version) {
            this.delegate = delegate;
            indent = (" [" + version + "] ").getBytes(StandardCharsets.UTF_8);
        }

        @Override
        public void write(int b) throws IOException {
            int[] arr = {b};
            write(arr, 0, 1);
        }

        // Note: this is NOT an override of OutputStream.write(byte[], int, int)
        // (it takes int[]); it is an internal helper. Fixed to honor the offset
        // and length arguments, which the previous implementation ignored.
        public void write(int[] bytes, int offset, int length) throws IOException {
            for (int i = offset; i < offset + length; i++) {
                delegate.write(bytes[i]);
                if (bytes[i] == '\n') {
                    delegate.write(indent);
                }
            }
        }
    }

    // Reads and trims the single-line minimumCompilerVersion file.
    private static String readFromFile(File file) {
        try {
            return FileUtils.readFileToString(file).trim();
        } catch (IOException ioException) {
            // Fixed message: the previous text claimed a "java properties file".
            throw new GradleException("Cannot read minimum compiler version file " + file, ioException);
        }
    }
}
| apache-2.0 |