gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.shindig.gadgets.rewrite;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.shindig.common.uri.Uri;
import org.apache.shindig.gadgets.http.HttpRequest;
import org.apache.shindig.gadgets.http.HttpResponse;
import org.apache.shindig.gadgets.http.HttpResponseBuilder;
import org.apache.shindig.gadgets.parse.caja.CajaCssLexerParser;
import org.easymock.EasyMock;
import org.junit.Before;
import org.junit.Test;
import java.io.StringReader;
import java.io.StringWriter;
import java.util.List;
import com.google.common.collect.Lists;
/**
 * Unit tests for {@code CssRequestRewriter}: proxying of CSS url() links,
 * expires/refresh overrides, cache bypass, container-specific proxies and
 * extraction of @import statements.
 */
public class CssRequestRewriterTest extends BaseRewriterTestCase {
    private CssRequestRewriter rewriter;
    private CssRequestRewriter rewriterNoOverrideExpires;
    private Uri dummyUri;

    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        // Rewriter whose feature spec carries no expires override: proxied
        // links fall back to the default refresh period.
        ContentRewriterFeature overrideFeatureNoOverrideExpires = rewriterFeatureFactory
            .get(createSpecWithRewrite(".*", ".*exclude.*", null,
                HTMLContentRewriter.TAGS));
        ContentRewriterFeatureFactory factoryNoOverrideExpires =
            mockContentRewriterFeatureFactory(overrideFeatureNoOverrideExpires);
        rewriterNoOverrideExpires = new CssRequestRewriter(
            factoryNoOverrideExpires, new CajaCssLexerParser(),
            new DefaultProxyingLinkRewriterFactory(rewriterUris));

        // Rewriter that forces refresh=3600 on every proxied link.
        ContentRewriterFeature overrideFeature =
            rewriterFeatureFactory.get(createSpecWithRewrite(".*", ".*exclude.*", "3600",
                HTMLContentRewriter.TAGS));
        ContentRewriterFeatureFactory factory = mockContentRewriterFeatureFactory(overrideFeature);
        rewriter = new CssRequestRewriter(factory, new CajaCssLexerParser(),
            new DefaultProxyingLinkRewriterFactory(rewriterUris));
        dummyUri = Uri.parse("http://www.w3c.org");
    }

    /**
     * Reads a classpath resource into a String.
     * @param path resource path relative to the classpath root
     * @return the full resource content
     */
    private String loadResource(String path) throws Exception {
        return IOUtils.toString(
            this.getClass().getClassLoader().getResourceAsStream(path));
    }

    /** Builds the canonical GET request for rewritebasic.css on behalf of SPEC_URL. */
    private HttpRequest createCssRequest() {
        HttpRequest request =
            new HttpRequest(Uri.parse("http://www.example.org/path/rewritebasic.css"));
        request.setMethod("GET");
        request.setGadget(SPEC_URL);
        return request;
    }

    /** Wraps the given body in a text/css HTTP response. */
    private HttpResponse createCssResponse(String content) {
        return new HttpResponseBuilder().setHeader("Content-Type", "text/css")
            .setResponseString(content).create();
    }

    /** Asserts that two CSS strings are equal, ignoring all whitespace. */
    private void assertCssEquals(String expected, String actual) {
        assertEquals(StringUtils.deleteWhitespace(expected),
            StringUtils.deleteWhitespace(actual));
    }

    @Test
    public void testCssBasic() throws Exception {
        String content = loadResource("org/apache/shindig/gadgets/rewrite/rewritebasic.css");
        String expected = loadResource("org/apache/shindig/gadgets/rewrite/rewritebasic-expected.css");
        HttpRequest request = createCssRequest();
        HttpResponse response = createCssResponse(content);
        MutableContent mc = new MutableContent(null, content);
        rewriter.rewrite(request, response, mc);
        assertCssEquals(expected, mc.getContent());
    }

    @Test
    public void testCssBasicNoOverrideExpires() throws Exception {
        String content = loadResource("org/apache/shindig/gadgets/rewrite/rewritebasic.css");
        String expected = loadResource("org/apache/shindig/gadgets/rewrite/rewritebasic-expected.css");
        // Without an expires override the default refresh period applies.
        expected = expected.replaceAll("refresh=3600", "refresh=86400");
        HttpRequest request = createCssRequest();
        HttpResponse response = createCssResponse(content);
        MutableContent mc = new MutableContent(null, content);
        rewriterNoOverrideExpires.rewrite(request, response, mc);
        assertCssEquals(expected, mc.getContent());
    }

    @Test
    public void testCssBasicNoProxy() throws Exception {
        String content = loadResource("org/apache/shindig/gadgets/rewrite/rewritebasic.css");
        String expected = loadResource("org/apache/shindig/gadgets/rewrite/rewritebasic-expected.css");
        // Ignoring the cache should append nocache=1 to every proxied link.
        expected = expected.replaceAll("fp=1150739864", "fp=1150739864&nocache=1");
        HttpRequest request = createCssRequest();
        request.setIgnoreCache(true);
        HttpResponse response = createCssResponse(content);
        MutableContent mc = new MutableContent(null, content);
        rewriter.rewrite(request, response, mc);
        assertCssEquals(expected, mc.getContent());
    }

    @Test
    public void testCssWithContainerProxy() throws Exception {
        String content = loadResource("org/apache/shindig/gadgets/rewrite/rewritebasic.css");
        String expected = loadResource("org/apache/shindig/gadgets/rewrite/rewritebasic-expected.css");
        // The mock container proxies through its own server.
        expected = replaceDefaultWithMockServer(expected);
        HttpRequest request = createCssRequest();
        request.setContainer(MOCK_CONTAINER);
        HttpResponse response = createCssResponse(content);
        MutableContent mc = new MutableContent(null, content);
        rewriter.rewrite(request, response, mc);
        assertCssEquals(expected, mc.getContent());
    }

    @Test
    public void testNoRewriteUnknownMimeType() {
        MutableContent mc = control.createMock(MutableContent.class);
        HttpRequest req = control.createMock(HttpRequest.class);
        EasyMock.expect(req.getRewriteMimeType()).andReturn("unknown");
        control.replay();
        // rewrite() must decline (return false) for content it cannot parse.
        assertFalse(rewriter.rewrite(req, fakeResponse, mc));
        control.verify();
    }

    /**
     * Rewrites {@code content} fetched from {@code base} and asserts the result
     * matches {@code expected}, ignoring whitespace.
     * NOTE(review): {@code linkRewriter} is currently unused — the rewriter
     * under test builds its own link rewriter — but the parameter is kept for
     * signature compatibility.
     */
    private void validateRewritten(String content, Uri base,
        LinkRewriter linkRewriter, String expected) {
        MutableContent mc = new MutableContent(null, content);
        HttpRequest request = new HttpRequest(base);
        rewriter.rewrite(request,
            new HttpResponseBuilder().setHeader("Content-Type", "text/css").create(), mc);
        assertCssEquals(expected, mc.getContent());
    }

    /** Convenience overload using the dummy base URI and default link rewriter. */
    private void validateRewritten(String content, String expected) {
        validateRewritten(content, dummyUri, defaultLinkRewriter, expected);
    }

    @Test
    public void testUrlDeclarationRewrite() {
        String original =
            "div {list-style-image:url('http://a.b.com/bullet.gif');list-style-position:outside;margin:5px;padding:0}\n" +
            ".someid {background-image:url(http://a.b.com/bigimg.png);float:right;width:165px;height:23px;margin-top:4px;margin-left:5px}";
        String rewritten =
            "div {list-style-image:url('http://www.test.com/dir/proxy?url=http%3A%2F%2Fa.b.com%2Fbullet.gif&fp=1150739864&refresh=3600');\n"
            + "list-style-position:outside;margin:5px;padding:0}\n"
            + ".someid {background-image:url('http://www.test.com/dir/proxy?url=http%3A%2F%2Fa.b.com%2Fbigimg.png&fp=1150739864&refresh=3600');\n"
            + "float:right;width:165px;height:23px;margin-top:4px;margin-left:5px}";
        validateRewritten(original, rewritten);
    }

    @Test
    public void testExtractImports() {
        String original = " @import url(www.example.org/some.css);\n" +
            "@import url('www.example.org/someother.css');\n" +
            "@import url(\"www.example.org/another.css\");\n" +
            " div { color: blue; }\n" +
            " p { color: black; }\n" +
            " span { color: red; }";
        String expected = " div { color: blue; }\n" +
            " p { color: black; }\n" +
            " span { color: red; }";
        StringWriter sw = new StringWriter();
        List<String> extractedUrls = rewriter
            .rewrite(new StringReader(original), dummyUri, defaultLinkRewriter, sw, true);
        assertEquals(expected, sw.toString());
        // JUnit convention: expected value first, actual value second.
        assertEquals(Lists.newArrayList("www.example.org/some.css",
            "www.example.org/someother.css", "www.example.org/another.css"),
            extractedUrls);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.jini.test.spec.security.basicproxypreparer;
import java.util.logging.Level;
import com.sun.jini.qa.harness.QATest;
import com.sun.jini.qa.harness.QAConfig;
import com.sun.jini.qa.harness.TestException;
import com.sun.jini.qa.harness.TestException;
import com.sun.jini.qa.harness.QAConfig;
import java.util.logging.Logger;
import java.util.logging.Level;
import net.jini.security.BasicProxyPreparer;
import java.security.Permission;
import net.jini.core.constraint.MethodConstraints;
import net.jini.core.constraint.InvocationConstraint;
import net.jini.core.constraint.Integrity;
/**
* <pre>
* Purpose:
* This test verifies the behavior of the BasicProxyPreparer class
* equals method:
* boolean equals(Object object)
*
* Test Cases:
* This test contains several test case - one combination for each form
* of constructor, verify value true and false and methodConstrains value
* null, empty and not empty. Each case goes through actions described below
* in case if they have a sense for that form.
*
* Infrastructure:
* This test requires the following infrastructure:
* 1) FakeBasicProxyPreparer - accessor of the tested BasicProxyPreparer
* class, that is gateway to protected method.
* 2) FakeMethodConstraints - a fake MethodConstraints object with
* configurable method return values.
*
* Actions:
* Test checks normal and exceptional variants of the
* parameters for the getMethodConstraints method.
*
* Test verifies the following assertions and performs the following steps:
* 1) Returns true if the given object is an instance of the same
* class as this object, with the same value for verify, with
* method constraints that are equals ...,
* and with permissions containing the same elements, independent
* of order.
* a) returns true for the same object;
* steps:
* construct a BasicProxyPreparer object passing true for "verify",
* some valid values for "permissions" and some non empty instance for
* "methodConstrains";
* call equals method passing the same object as argument;
* assert true is returned;
* b) returns true for another equal object;
* steps:
* construct a BasicProxyPreparer object passing true for "verify",
* some valid values for "permissions" and some non empty instance for
* "methodConstrains";
* construct another BasicProxyPreparer object passing true for "verify",
* the same values for "permissions" and the same instance for
* "methodConstrains";
* invoke equals method from the first object passing the second object
* as an argument;
* assert true is returned;
* c) an instance of not the same class;
* steps:
* construct a BasicProxyPreparer object passing true for "verify",
* some valid values for "permissions" and some non empty instance for
* "methodConstrains";
* construct FakeBasicProxyPreparer object passing true for "verify",
* the same values for "permissions" and the same instance for
* "methodConstrains";
* invoke equals method from the first object passing the second object
* as an argument;
* assert false is returned;
* invoke equals method from the first object passing null
* as an argument;
* assert false is returned;
* d) not the same value for "verify";
* steps:
* construct a BasicProxyPreparer object passing true for "verify",
* some valid values for "permissions" and some non empty instance for
* "methodConstrains";
* construct another BasicProxyPreparer object passing false for
* "verify", the same values for "permissions" and the same instance for
* "methodConstrains";
* invoke equals method from the first object passing the second object
* as an argument;
* assert false is returned;
* invoke equals method from the second object passing the first object
* as an argument;
* assert false is returned;
* e) not equal method constraints;
* steps:
* construct a BasicProxyPreparer object passing true for "verify",
* some valid values for "permissions" and some non empty instance for
* "methodConstrains";
* construct another BasicProxyPreparer object passing true for "verify",
* the same values for "permissions" and some another different
* instance for "methodConstrains";
* invoke equals method from the first object passing the second object
* as an argument;
* assert false is returned;
* f) specified and not specified method constraints;
* steps:
* construct a BasicProxyPreparer object passing true for "verify",
* some valid values for "permissions" and some non empty instance for
* "methodConstrains";
* construct another BasicProxyPreparer object passing true for "verify",
* the same values for "permissions" and null for "methodConstrains";
* invoke equals method from the first object passing the second object
* as an argument;
* assert false is returned;
* invoke equals method from the second object passing the first object
* as an argument;
* assert false is returned;
* g) permissions containing not the same elements;
* steps:
* construct a BasicProxyPreparer object passing true for "verify",
* some valid values for "permissions" and some non empty instance for
* "methodConstrains";
* construct another BasicProxyPreparer object passing true for "verify",
* some another different values for "permissions" and the same
* instance for "methodConstrains";
* invoke equals method from the first object passing the second object
* as an argument;
* assert false is returned;
* h) permissions containing the same elements, but in the another order;
* steps:
* construct a BasicProxyPreparer object passing true for "verify",
* some valid values for "permissions" and some non empty instance for
* "methodConstrains";
* construct another BasicProxyPreparer object passing true for "verify",
* some another values for "permissions" that contains the same
* permissions but in the different order and the same instance for
* "methodConstrains";
* invoke equals method from the first object passing the second object
* as an argument;
* assert true is returned;
* 2) Returns true if the given object is an instance of the same
* class as this object, with the same value for verify, with
* method constraints that ... similarly not specified,
* and with permissions containing the same elements, independent
* of order.
* steps:
* a) returns true for the same object;
* steps:
* construct a BasicProxyPreparer object passing true for "verify",
* some valid values for "permissions" and some null for
* "methodConstrains";
* call equals method passing the same object as argument;
* assert true is returned;
* b) returns true for another equal object;
* steps:
* construct a BasicProxyPreparer object passing true for "verify",
* some valid values for "permissions" and null for "methodConstrains";
* construct another BasicProxyPreparer object passing true for "verify",
* the same values for "permissions" and null for "methodConstrains";
* invoke equals method from the first object passing the second object
* as an argument;
* assert true is returned;
* c) an instance of not the same class;
* steps:
* construct a BasicProxyPreparer object passing true for "verify",
* some valid values for "permissions" null for "methodConstrains";
* construct FakeBasicProxyPreparer object passing true for "verify",
* the same values for "permissions" and null for "methodConstrains";
* invoke equals method from the first object passing the second object
* as an argument;
* assert false is returned;
* invoke equals method from the first object passing null
* as an argument;
* assert false is returned;
* d) not the same value for "verify";
* steps:
* construct a BasicProxyPreparer object passing true for "verify",
* some valid values for "permissions" and null for "methodConstrains";
* construct another BasicProxyPreparer object passing false for
* "verify", the same values for "permissions" and null for
* "methodConstrains";
* invoke equals method from the first object passing the second object
* as an argument;
* assert false is returned;
* invoke equals method from the second object passing the first object
* as an argument;
* assert false is returned;
* g) permissions containing not the same elements;
* steps:
* construct a BasicProxyPreparer object passing true for "verify",
* some valid values for "permissions" and null for "methodConstrains";
* construct another BasicProxyPreparer object passing true for "verify",
* some another different values for "permissions" and null
* for "methodConstrains";
* invoke equals method from the first object passing the second object
* as an argument;
* assert false is returned;
* h) permissions containing the same elements, but in the another order;
* steps:
* construct a BasicProxyPreparer object passing true for "verify",
* some valid values for "permissions" and null for
* "methodConstrains";
* construct another BasicProxyPreparer object passing true for "verify",
* some another values for "permissions" that contains the same
* permissions but in the different order and null for
* "methodConstrains";
* invoke equals method from the first object passing the second object
* as an argument;
* assert true is returned;
* </pre>
*/
public class Equals_Test extends QATest {
    /**
     * Test cases description.
     * Elements: amount of arguments in BasicProxyPreparer constructor (0, 2 or 3)
     *    plus 10 if verify should be true
     *    plus 100 if methodConstraints should be an empty instance
     *    plus 200 if methodConstraints should be some non empty instance
     *    plus 1000 if permissions should be some empty instance
     *    plus 2000 if permissions should be some non empty instance
     */
    protected final int [] cases = {
        0, 2, 3, 12, 13, 103, 113, 203, 213,
        1000, 1002, 1003, 1012, 1013, 1103, 1113, 1203, 1213,
        2000, 2002, 2003, 2012, 2013, 2103, 2113, 2203, 2213 };

    /**
     * Run FakeBasicProxyPreparer constructor for valid test case.
     *
     * @param testCase int value 0, 2 or 3 according to
     * amount of arguments in BasicProxyPreparer constructor
     * @param verify whether to verify if proxies are trusted
     * @param methodConstraints method constraints to use when verifying
     * and setting constraints
     * @param permissions permissions to grant, or <code>null</code> if no
     * permissions should be granted
     */
    protected FakeBasicProxyPreparer callFakeConstructor(
            int testCase,
            boolean verify,
            MethodConstraints methodConstraints,
            Permission[] permissions) {
        if (testCase == 0) { // constructor without arguments
            return new FakeBasicProxyPreparer();
        } else if (testCase == 2) { // constructor with 2 arguments
            return new FakeBasicProxyPreparer(verify, permissions);
        } else { // constructor with 3 arguments
            return new FakeBasicProxyPreparer(verify,
                    methodConstraints, permissions);
        }
    }

    /**
     * Run BasicProxyPreparer constructor for valid test case.
     *
     * @param testCase int value 0, 2 or 3 according to
     * amount of arguments in BasicProxyPreparer constructor
     * @param verify whether to verify if proxies are trusted
     * @param methodConstraints method constraints to use when verifying
     * and setting constraints
     * @param permissions permissions to grant, or <code>null</code> if no
     * permissions should be granted
     */
    protected BasicProxyPreparer callConstructor(
            int testCase,
            boolean verify,
            MethodConstraints methodConstraints,
            Permission[] permissions) {
        if (testCase == 0) { // constructor without arguments
            return new BasicProxyPreparer();
        } else if (testCase == 2) { // constructor with 2 arguments
            return new BasicProxyPreparer(verify, permissions);
        } else { // constructor with 3 arguments
            return new BasicProxyPreparer(verify,
                    methodConstraints, permissions);
        }
    }

    /**
     * This method performs all actions mentioned in class description.
     * Each encoded case is decoded into (constructor form, verify,
     * methodConstraints, permissions) and the equals-contract checks
     * a-h from the class description are executed for that combination.
     */
    public void run() throws Exception {
        logger.log(Level.INFO, "======================================");
        for (int i = 0; i < cases.length; ++i) {
            int testCase = cases[i];
            logger.log(Level.INFO, "--> " + testCase);
            MethodConstraints mc = null;
            Permission[] perm = null;
            // Decode the permissions component. The comparisons must be
            // inclusive (>=) so that the exact boundary cases 1000 and 2000
            // in the cases table select the intended option; with a strict
            // ">" they fell through and were decoded incorrectly.
            if (testCase >= 2000) {
                perm = new Permission[] {
                    new RuntimePermission("getClassLoader")};
                testCase -= 2000;
            } else if (testCase >= 1000) {
                perm = new Permission[] {};
                testCase -= 1000;
            }
            // Decode the method-constraints component.
            if (testCase >= 200) {
                mc = new FakeMethodConstraints(
                        new InvocationConstraint[] {Integrity.NO});
                testCase -= 200;
            } else if (testCase >= 100) {
                mc = new FakeMethodConstraints(null);
                testCase -= 100;
            }
            // Decode the verify flag; the remainder is the constructor form.
            boolean verify = false;
            if (testCase >= 10) {
                verify = true;
                testCase -= 10;
            }
            // a - same object
            BasicProxyPreparer bpp = callConstructor(
                    testCase, verify, mc, perm);
            if (!bpp.equals(bpp)) {
                throw new TestException(
                        "equals method should return true (a)");
            }
            // b - equal object
            bpp = callConstructor(
                    testCase, verify, mc, perm);
            BasicProxyPreparer bpp2 = callConstructor(
                    testCase, verify, mc, perm);
            if (!bpp.equals(bpp2)) {
                throw new TestException(
                        "equals method should return true (b)");
            }
            // c - an instance of not the same class, and null.
            // The fake (subclass) preparer goes into its own variable; the
            // original code assigned it to bpp and left fbpp unused.
            bpp = callConstructor(
                    testCase, verify, mc, perm);
            FakeBasicProxyPreparer fbpp = callFakeConstructor(
                    testCase, verify, mc, perm);
            if (bpp.equals(fbpp)) {
                throw new TestException(
                        "equals method should return false (c)");
            }
            if (fbpp.equals(bpp)) {
                throw new TestException(
                        "equals method should return false (c)");
            }
            if (bpp.equals(null)) {
                throw new TestException(
                        "equals method should return false (c)");
            }
            if (testCase == 0) { continue; }
            // d - not the same value for "verify"
            bpp = callConstructor(
                    testCase, verify, mc, perm);
            bpp2 = callConstructor(
                    testCase, !verify, mc, perm);
            if (bpp.equals(bpp2)) {
                throw new TestException(
                        "equals method should return false (d)");
            }
            if (bpp2.equals(bpp)) {
                throw new TestException(
                        "equals method should return false (d)");
            }
            // g - permissions containing not the same elements
            bpp = callConstructor(
                    testCase, verify, mc, perm);
            Permission[] perm2 = new Permission[] {
                new RuntimePermission("setClassLoader")};
            bpp2 = callConstructor(
                    testCase, verify, mc, perm2);
            if (bpp.equals(bpp2)) {
                throw new TestException(
                        "equals method should return false (g)");
            }
            if (bpp2.equals(bpp)) {
                throw new TestException(
                        "equals method should return false (g)");
            }
            // h - permissions containing the same elements, but in
            // another order
            Permission[] perm3 = new Permission[] {
                new RuntimePermission("getClassLoader"),
                new RuntimePermission("setClassLoader")};
            bpp = callConstructor(
                    testCase, verify, mc, perm3);
            Permission[] perm4 = new Permission[] {
                new RuntimePermission("setClassLoader"),
                new RuntimePermission("getClassLoader")};
            bpp2 = callConstructor(
                    testCase, verify, mc, perm4);
            if (!bpp.equals(bpp2)) {
                throw new TestException(
                        "equals method should return true (h)");
            }
            if (!bpp2.equals(bpp)) {
                throw new TestException(
                        "equals method should return true (h)");
            }
            if (testCase == 2) { continue; }
            // e, f - not equal method constraints (3-arg constructor only)
            bpp = callConstructor(
                    testCase, verify, mc, perm);
            MethodConstraints mc2 = new FakeMethodConstraints(
                    new InvocationConstraint[] {Integrity.YES});
            bpp2 = callConstructor(
                    testCase, verify, mc2, perm);
            if (bpp.equals(bpp2)) {
                throw new TestException(
                        "equals method should return false (e)");
            }
            if (bpp2.equals(bpp)) {
                throw new TestException(
                        "equals method should return false (e)");
            }
        }
    }
}
| |
/*
* Copyright 2005 Joe Walker
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.directwebremoting.impl;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import org.directwebremoting.WebContextFactory;
import org.directwebremoting.extend.AccessControl;
import org.directwebremoting.extend.AccessDeniedException;
import org.directwebremoting.extend.Creator;
/**
* Control who should be accessing which methods on which classes.
* @author Joe Walker [joe at getahead dot ltd dot uk]
*/
public class DefaultAccessControl implements AccessControl
{
/* (non-Javadoc)
* @see org.directwebremoting.extend.AccessControl#assertExecutionIsPossible(org.directwebremoting.extend.Creator, java.lang.String, java.lang.reflect.Method)
*/
// Pre-invocation security gate: a remoted call proceeds only if it passes
// both the role-based restriction check and the displayability checks.
// Each delegate throws SecurityException (or a subclass) on failure.
public void assertExecutionIsPossible(Creator creator, String className, Method method) throws SecurityException
{
// J2EE-role restrictions registered via addRoleRestriction()
assertIsRestrictedByRole(className, method);
// public / not-on-Object / include-exclude rules / DWR-internal checks
assertIsDisplayable(creator, className, method);
}
/* (non-Javadoc)
* @see org.directwebremoting.AccessControl#getReasonToNotDisplay(org.directwebremoting.Creator, java.lang.String, java.lang.reflect.Method)
*/
// Checks that the method may be exposed to clients at all, independent of
// the calling user. The first failing check throws SecurityException, so
// the order of the calls below determines which reason is reported.
public void assertIsDisplayable(Creator creator, String className, Method method) throws SecurityException
{
assertIsMethodPublic(method);
assertIsExecutable(className, method.getName());
assertIsNotOnBaseObject(method);
// Unless explicitly enabled, DWR's own classes must not be remoted,
// either as the target class or as one of the parameter types.
if (!exposeInternals)
{
assertIsClassDwrInternal(creator);
assertAreParametersDwrInternal(method);
}
}
/* (non-Javadoc)
 * @see org.directwebremoting.AccessControl#addRoleRestriction(java.lang.String, java.lang.String, java.lang.String)
 */
// Registers a J2EE role that is allowed to call scriptName.methodName,
// creating the role set for that method on first use.
public void addRoleRestriction(String scriptName, String methodName, String role)
{
    String lookup = scriptName + '.' + methodName;
    Set<String> allowed = roleRestrictMap.get(lookup);
    if (allowed == null)
    {
        allowed = new HashSet<String>();
        roleRestrictMap.put(lookup, allowed);
    }
    allowed.add(role);
}
/* (non-Javadoc)
 * @see org.directwebremoting.AccessControl#addIncludeRule(java.lang.String, java.lang.String)
 */
// Adds an include rule, switching the creator's policy to default-deny mode.
public void addIncludeRule(String scriptName, String methodName)
{
    Policy entry = getPolicy(scriptName);
    // An include rule implies default-deny. Switching modes is only legal
    // while no (exclude) rules have been registered for this creator.
    if (entry.defaultAllow)
    {
        if (!entry.rules.isEmpty())
        {
            throw new IllegalArgumentException("The Creator '" + scriptName + "' uses mixed include and exclude statements");
        }
        entry.defaultAllow = false;
    }
    // Record the inclusion
    entry.rules.add(methodName);
}
/* (non-Javadoc)
 * @see org.directwebremoting.AccessControl#addExcludeRule(java.lang.String, java.lang.String)
 */
// Adds an exclude rule, switching the creator's policy to default-allow mode.
public void addExcludeRule(String scriptName, String methodName)
{
    Policy entry = getPolicy(scriptName);
    // An exclude rule implies default-allow. Switching modes is only legal
    // while no (include) rules have been registered for this creator.
    if (!entry.defaultAllow)
    {
        if (!entry.rules.isEmpty())
        {
            throw new IllegalArgumentException("The Creator '" + scriptName + "' uses mixed include and exclude statements");
        }
        entry.defaultAllow = true;
    }
    // Record the exclusion
    entry.rules.add(methodName);
}
/**
 * Enforce any J2EE role restriction registered for the given method.
 * When roles are registered, the current request must carry a valid
 * authenticated session and the user must be in at least one of them.
 * @param scriptName The name of the creator to Javascript
 * @param method The method to execute
 */
protected void assertIsRestrictedByRole(String scriptName, Method method)
{
    Set<String> allowedRoles = getRoleRestrictions(scriptName, method.getName());
    boolean restricted = allowedRoles != null && !allowedRoles.isEmpty();
    if (restricted)
    {
        HttpServletRequest request = WebContextFactory.get().getHttpServletRequest();
        assertAuthenticationIsValid(request);
        assertAllowedByRoles(request, allowedRoles);
    }
}
/**
 * Look up the registered roles for the given script method.
 * @param scriptName The name of the creator to Javascript
 * @param methodName The name of the method (without brackets)
 * @return A Set of all the roles for the given script and method, or
 *         null when no restriction has been registered
 */
protected Set<String> getRoleRestrictions(String scriptName, String methodName)
{
    // Keys take the form "script.method", matching addRoleRestriction()
    return roleRestrictMap.get(scriptName + '.' + methodName);
}
/**
* Check the users session for validity.
* Role-restricted methods require both a live session and a
* container-authenticated user.
* @param req The users request
* @throws SecurityException if the users session is invalid
*/
protected static void assertAuthenticationIsValid(HttpServletRequest req) throws SecurityException
{
// ensure that at least the next call has a valid session
req.getSession();
// if there was an expired session, the request has to fail
if (!req.isRequestedSessionIdValid())
{
throw new LoginRequiredException("Session timed out, or invalid");
}
// no remote user means the container never authenticated this request
if (req.getRemoteUser() == null)
{
throw new LoginRequiredException("No valid authentication details");
}
}
/**
 * Is this current user in the given list of roles?
 * The wildcard role "*" admits any authenticated user.
 * @param req The users request
 * @param roles The list of roles to check
 * @throws SecurityException if this user is not allowed by the list of roles
 */
protected static void assertAllowedByRoles(HttpServletRequest req, Set<String> roles) throws SecurityException
{
    for (String candidate : roles)
    {
        boolean wildcard = "*".equals(candidate);
        if (wildcard || req.isUserInRole(candidate))
        {
            // First matching role grants access
            return;
        }
    }
    throw new AccessDeniedException("User is not in role for this method.");
}
/**
 * Is the method public? Only public methods may be remoted.
 * @param method The method that we wish to execute
 */
protected static void assertIsMethodPublic(Method method)
{
    int mods = method.getModifiers();
    if (!Modifier.isPublic(mods))
    {
        throw new SecurityException("The method is not declared public");
    }
}
/**
 * We ban some methods from {@link java.lang.Object}.
 * @param method The method that should not be owned by {@link java.lang.Object}
 */
protected static void assertIsNotOnBaseObject(Method method)
{
    Class<?> owner = method.getDeclaringClass();
    if (owner == Object.class)
    {
        throw new SecurityException("Methods defined in java.lang.Object are not accessible");
    }
}
/**
 * Test to see if a method is excluded or included.
 * In default-allow mode the rules are exclusions; in default-deny mode
 * they are inclusions. No policy at all means everything is allowed.
 * @param scriptName The name of the creator to Javascript
 * @param methodName The name of the method (without brackets)
 * @throws SecurityException if the method is allowed by the rules in addIncludeRule()
 * @see AccessControl#addIncludeRule(String, String)
 */
protected void assertIsExecutable(String scriptName, String methodName) throws SecurityException
{
    Policy policy = policyMap.get(scriptName);
    if (policy == null)
    {
        // No policy registered for this creator: nothing is denied
        return;
    }
    // Look for a rule naming this method. (If at some point we wish to do
    // regex matching on rules, here is the place to do it.)
    String match = null;
    for (String rule : policy.rules)
    {
        if (methodName.equals(rule))
        {
            match = rule;
            break;
        }
    }
    // default-allow + match    -> the rule is an exclusion, so deny
    // default-deny  + no match -> the method was never included, so deny
    boolean excludedByRule = policy.defaultAllow && match != null;
    boolean notIncluded = !policy.defaultAllow && match == null;
    if (excludedByRule || notIncluded)
    {
        throw new SecurityException("Method access is denied by rules in dwr.xml");
    }
}
/**
 * Check that none of the parameter types are DWR internal classes either.
 * Parameters under the DWR package are rejected unless they live in the
 * conversion-allowed {@code .io} sub-package.
 * @param method The method that we want to execute
 */
protected static void assertAreParametersDwrInternal(Method method)
{
    for (Class<?> paramType : method.getParameterTypes())
    {
        String paramName = paramType.getName();
        // Access to org.directwebremoting is denied except for .io
        if (paramName.startsWith(PACKAGE_DWR_DENY) && !paramName.startsWith(PACKAGE_ALLOW_CONVERT))
        {
            throw new SecurityException("Methods containing parameters defined by DWR can not be remoted");
        }
    }
}
/**
 * Is the class that we are executing a method on part of DWR itself?
 * Classes under the DWR package are rejected unless they live in the
 * creation-allowed {@code .export} sub-package.
 * @param creator The {@link Creator} that exposes the class
 */
protected static void assertIsClassDwrInternal(Creator creator)
{
    String typeName = creator.getType().getName();
    // Access to org.directwebremoting is denied except for .export
    boolean inDwrPackage = typeName.startsWith(PACKAGE_DWR_DENY);
    boolean inAllowedPackage = typeName.startsWith(PACKAGE_ALLOW_CREATE);
    if (inDwrPackage && !inAllowedPackage)
    {
        throw new SecurityException("Methods defined by DWR can not be remoted");
    }
}
/**
 * Find the policy for the given type, lazily creating a default
 * (allow-everything) policy if none has been registered yet.
 * @param type The name of the creator
 * @return The policy for the given Creator
 */
protected Policy getPolicy(String type)
{
    Policy existing = policyMap.get(type);
    if (existing != null)
    {
        return existing;
    }
    Policy created = new Policy();
    policyMap.put(type, created);
    return created;
}
/**
 * Setter for whether DWR internal classes may be remoted.
 * @param exposeInternals the exposeInternals to set
 * @see #exposeInternals
 */
public void setExposeInternals(boolean exposeInternals)
{
    this.exposeInternals = exposeInternals;
}
/**
 * Do we allow DWR classes to be remoted?
 * Defaults to {@code false}, keeping DWR internals hidden unless enabled.
 * @see #PACKAGE_DWR_DENY
 */
protected boolean exposeInternals = false;
/**
 * A map of Creators to policies, keyed by the creator's script name.
 */
protected Map<String, Policy> policyMap = new HashMap<String, Policy>();
/**
 * What role based restrictions are there?
 * NOTE(review): presumably consulted by the role check earlier in this
 * class (the AccessDeniedException above) - confirm against the callers.
 */
protected Map<String, Set<String>> roleRestrictMap = new HashMap<String, Set<String>>();
/**
 * A struct that contains a method access policy for a Creator.
 * defaultAllow selects how {@link #rules} is interpreted: exclusions when
 * true, inclusions when false (see assertIsExecutable()).
 */
static class Policy
{
    // true = rules are an exclusion list; false = rules are an inclusion list
    boolean defaultAllow = true;
    List<String> rules = new ArrayList<String>();
}
/**
 * My package name, so we can ban DWR classes from being created or marshalled
 */
protected static final String PACKAGE_DWR_DENY = "org.directwebremoting.";
/**
 * Special dwr package name from which classes may be created
 */
protected static final String PACKAGE_ALLOW_CREATE = "org.directwebremoting.export.";
/**
 * Special dwr package name from which classes may be converted
 */
protected static final String PACKAGE_ALLOW_CONVERT = "org.directwebremoting.io.";
}
| |
/*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.stream;
import io.netty.buffer.MessageBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.channel.ChannelDuplexHandler;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelHandlerUtil;
import io.netty.channel.ChannelOutboundMessageHandler;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.ChannelPromise;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;
import java.nio.channels.ClosedChannelException;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * A {@link ChannelHandler} that adds support for writing a large data stream
 * asynchronously neither spending a lot of memory nor getting
 * {@link OutOfMemoryError}. Large data streaming such as file
 * transfer requires complicated state management in a {@link ChannelHandler}
 * implementation. {@link ChunkedWriteHandler} manages such complicated states
 * so that you can send a large data stream without difficulties.
 * <p>
 * To use {@link ChunkedWriteHandler} in your application, you have to insert
 * a new {@link ChunkedWriteHandler} instance:
 * <pre>
 * {@link ChannelPipeline} p = ...;
 * p.addLast("streamer", <b>new {@link ChunkedWriteHandler}()</b>);
 * p.addLast("handler", new MyHandler());
 * </pre>
 * Once inserted, you can write a {@link ChunkedInput} so that the
 * {@link ChunkedWriteHandler} can pick it up and fetch the content of the
 * stream chunk by chunk and write the fetched chunk downstream:
 * <pre>
 * {@link Channel} ch = ...;
 * ch.write(new {@link ChunkedFile}(new File("video.mkv"));
 * </pre>
 *
 * <h3>Sending a stream which generates a chunk intermittently</h3>
 *
 * Some {@link ChunkedInput} generates a chunk on a certain event or timing.
 * Such {@link ChunkedInput} implementation often returns {@code null} on
 * {@link ChunkedInput#readChunk(Object)}, resulting in the indefinitely suspended
 * transfer. To resume the transfer when a new chunk is available, you have to
 * call {@link #resumeTransfer()}.
 */
public class ChunkedWriteHandler
        extends ChannelDuplexHandler implements ChannelOutboundMessageHandler<Object> {

    private static final InternalLogger logger =
            InternalLoggerFactory.getInstance(ChunkedWriteHandler.class);

    // Outbound buffer shared with the pipeline: holds ChunkedInput instances,
    // the ChannelPromises added in flush(), and pass-through messages,
    // processed strictly in order by doFlush().
    private final MessageBuf<Object> queue = Unpooled.messageBuffer();

    // Upper bound on flushed-but-uncompleted chunk writes; reaching it
    // suspends further reading from the current ChunkedInput.
    private final int maxPendingWrites;

    // Captured in handlerAdded() so resumeTransfer() can be invoked from
    // outside the event loop.
    private volatile ChannelHandlerContext ctx;

    // Number of chunk writes that have been flushed but not yet completed.
    private final AtomicInteger pendingWrites = new AtomicInteger();

    // The queue element currently being processed, or null when none is.
    private Object currentEvent;

    /**
     * Creates a new instance with a default maximum of 4 pending chunk writes.
     */
    public ChunkedWriteHandler() {
        this(4);
    }

    /**
     * Creates a new instance.
     * @param maxPendingWrites the maximum number of in-flight chunk writes; must be &gt; 0
     * @throws IllegalArgumentException if {@code maxPendingWrites} is not positive
     */
    public ChunkedWriteHandler(int maxPendingWrites) {
        if (maxPendingWrites <= 0) {
            throw new IllegalArgumentException(
                    "maxPendingWrites: " + maxPendingWrites + " (expected: > 0)");
        }
        this.maxPendingWrites = maxPendingWrites;
    }

    @Override
    public MessageBuf<Object> newOutboundBuffer(ChannelHandlerContext ctx) throws Exception {
        return queue;
    }

    @Override
    public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
        this.ctx = ctx;
    }

    // Whether another chunk may be flushed without exceeding maxPendingWrites.
    private boolean isWritable() {
        return pendingWrites.get() < maxPendingWrites;
    }

    /**
     * Continues to fetch the chunks from the input.
     */
    public void resumeTransfer() {
        final ChannelHandlerContext ctx = this.ctx;
        if (ctx == null) {
            // handlerAdded() has not been called yet; nothing to resume.
            return;
        }
        if (ctx.executor().inEventLoop()) {
            try {
                doFlush(ctx);
            } catch (Exception e) {
                if (logger.isWarnEnabled()) {
                    logger.warn("Unexpected exception while sending chunks.", e);
                }
            }
        } else {
            // let the transfer resume on the next event loop round
            ctx.executor().execute(new Runnable() {
                @Override
                public void run() {
                    try {
                        doFlush(ctx);
                    } catch (Exception e) {
                        if (logger.isWarnEnabled()) {
                            logger.warn("Unexpected exception while sending chunks.", e);
                        }
                    }
                }
            });
        }
    }

    @Override
    public void read(ChannelHandlerContext ctx) {
        ctx.read();
    }

    @Override
    public void flush(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception {
        // Queue the promise behind the pending data so it completes in order.
        // When the channel is already inactive we still run doFlush() so the
        // queued events get failed/discarded.
        queue.add(promise);
        if (isWritable() || !ctx.channel().isActive()) {
            doFlush(ctx);
        }
    }

    @Override
    public void inboundBufferUpdated(ChannelHandlerContext ctx) throws Exception {
        ctx.fireInboundBufferUpdated();
    }

    @Override
    public void channelInactive(ChannelHandlerContext ctx) throws Exception {
        // Complete or fail whatever is still queued before propagating.
        doFlush(ctx);
        super.channelInactive(ctx);
    }

    // Drains the current event and the queue: closes every ChunkedInput and
    // completes every promise - success while all inputs had reached
    // end-of-input, failure (with the given cause, or a fresh
    // ClosedChannelException) once any had not.
    private void discard(final ChannelHandlerContext ctx, Throwable cause) {
        boolean fireExceptionCaught = false;
        boolean success = true;
        for (;;) {
            Object currentEvent = this.currentEvent;
            if (this.currentEvent == null) {
                currentEvent = queue.poll();
            } else {
                this.currentEvent = null;
            }
            if (currentEvent == null) {
                break;
            }
            if (currentEvent instanceof ChunkedInput) {
                ChunkedInput<?> in = (ChunkedInput<?>) currentEvent;
                try {
                    if (!in.isEndOfInput()) {
                        success = false;
                    }
                } catch (Exception e) {
                    success = false;
                    logger.warn(ChunkedInput.class.getSimpleName() + ".isEndOfInput() failed", e);
                }
                closeInput(in);
            } else if (currentEvent instanceof ChannelPromise) {
                ChannelPromise f = (ChannelPromise) currentEvent;
                if (!success) {
                    fireExceptionCaught = true;
                    if (cause == null) {
                        cause = new ClosedChannelException();
                    }
                    f.setFailure(cause);
                } else {
                    f.setSuccess();
                }
            }
        }
        if (fireExceptionCaught) {
            ctx.fireExceptionCaught(cause);
        }
    }

    // Core transfer loop: processes queued events while under the pending
    // write limit, reading chunks and flushing them downstream.
    private void doFlush(final ChannelHandlerContext ctx) throws Exception {
        Channel channel = ctx.channel();
        if (!channel.isActive()) {
            discard(ctx, null);
            return;
        }
        while (isWritable()) {
            if (currentEvent == null) {
                currentEvent = queue.poll();
            }
            if (currentEvent == null) {
                break;
            }
            // Local snapshot so the anonymous listeners below capture the
            // event even after the field is cleared.
            final Object currentEvent = this.currentEvent;
            if (currentEvent instanceof ChannelPromise) {
                this.currentEvent = null;
                ctx.flush((ChannelPromise) currentEvent);
            } else if (currentEvent instanceof ChunkedInput) {
                final ChunkedInput<?> chunks = (ChunkedInput<?>) currentEvent;
                boolean read;
                boolean endOfInput;
                boolean suspend;
                try {
                    read = readChunk(ctx, chunks);
                    endOfInput = chunks.isEndOfInput();
                    if (!read) {
                        // No need to suspend when reached at the end.
                        suspend = !endOfInput;
                    } else {
                        suspend = false;
                    }
                } catch (final Throwable t) {
                    this.currentEvent = null;
                    if (ctx.executor().inEventLoop()) {
                        ctx.fireExceptionCaught(t);
                    } else {
                        ctx.executor().execute(new Runnable() {
                            @Override
                            public void run() {
                                ctx.fireExceptionCaught(t);
                            }
                        });
                    }
                    closeInput(chunks);
                    break;
                }
                if (suspend) {
                    // ChunkedInput.nextChunk() returned null and it has
                    // not reached at the end of input. Let's wait until
                    // more chunks arrive. Nothing to write or notify.
                    break;
                }
                pendingWrites.incrementAndGet();
                ChannelFuture f = ctx.flush();
                if (endOfInput) {
                    this.currentEvent = null;
                    // Register a listener which will close the input once the write is complete.
                    // This is needed because the Chunk may have some resource bound that can not
                    // be closed before its not written.
                    //
                    // See https://github.com/netty/netty/issues/303
                    f.addListener(new ChannelFutureListener() {
                        @Override
                        public void operationComplete(ChannelFuture future) throws Exception {
                            pendingWrites.decrementAndGet();
                            closeInput(chunks);
                        }
                    });
                } else if (isWritable()) {
                    // Still under the limit: only clean up on failure.
                    f.addListener(new ChannelFutureListener() {
                        @Override
                        public void operationComplete(ChannelFuture future) throws Exception {
                            pendingWrites.decrementAndGet();
                            if (!future.isSuccess()) {
                                closeInput((ChunkedInput<?>) currentEvent);
                            }
                        }
                    });
                } else {
                    // Limit reached: resume the transfer once this write
                    // completes and capacity is available again.
                    f.addListener(new ChannelFutureListener() {
                        @Override
                        public void operationComplete(ChannelFuture future) throws Exception {
                            pendingWrites.decrementAndGet();
                            if (!future.isSuccess()) {
                                closeInput((ChunkedInput<?>) currentEvent);
                            } else if (isWritable()) {
                                resumeTransfer();
                            }
                        }
                    });
                }
            } else {
                // Not chunked: pass the message through to the next outbound buffer.
                ChannelHandlerUtil.addToNextOutboundBuffer(ctx, currentEvent);
                this.currentEvent = null;
            }
            if (!channel.isActive()) {
                discard(ctx, new ClosedChannelException());
                return;
            }
        }
    }

    /**
     * Read the next chunk from the {@link ChunkedInput} and transfer it to the outbound buffer.
     * @param ctx the {@link ChannelHandlerContext} this handler is bound to
     * @param chunks the {@link ChunkedInput} to read from
     * @return {@code true} if something could be transferred to the outbound buffer
     * @throws Exception if something goes wrong
     * @throws IllegalArgumentException if the input is neither a {@link ChunkedByteInput}
     *         nor a {@link ChunkedMessageInput}
     */
    @SuppressWarnings("unchecked")
    protected boolean readChunk(ChannelHandlerContext ctx, ChunkedInput<?> chunks) throws Exception {
        if (chunks instanceof ChunkedByteInput) {
            return ((ChunkedByteInput) chunks).readChunk(ctx.nextOutboundByteBuffer());
        } else if (chunks instanceof ChunkedMessageInput) {
            return ((ChunkedMessageInput<Object>) chunks).readChunk(ctx.nextOutboundMessageBuffer());
        } else {
            throw new IllegalArgumentException("ChunkedInput instance " + chunks + " not supported");
        }
    }

    // Closes the input, logging (never propagating) any failure.
    static void closeInput(ChunkedInput<?> chunks) {
        try {
            chunks.close();
        } catch (Throwable t) {
            if (logger.isWarnEnabled()) {
                logger.warn("Failed to close a chunked input.", t);
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.taskmanager;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.JobID;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.testutils.OneShotLatch;
import org.apache.flink.runtime.blob.BlobCacheService;
import org.apache.flink.runtime.blob.PermanentBlobCache;
import org.apache.flink.runtime.blob.TransientBlobCache;
import org.apache.flink.runtime.broadcast.BroadcastVariableManager;
import org.apache.flink.runtime.checkpoint.CheckpointMetaData;
import org.apache.flink.runtime.checkpoint.CheckpointMetrics;
import org.apache.flink.runtime.checkpoint.CheckpointOptions;
import org.apache.flink.runtime.clusterframework.types.AllocationID;
import org.apache.flink.runtime.deployment.InputGateDeploymentDescriptor;
import org.apache.flink.runtime.deployment.ResultPartitionDeploymentDescriptor;
import org.apache.flink.runtime.execution.Environment;
import org.apache.flink.runtime.execution.ExecutionState;
import org.apache.flink.runtime.execution.librarycache.LibraryCacheManager;
import org.apache.flink.runtime.executiongraph.ExecutionAttemptID;
import org.apache.flink.runtime.executiongraph.JobInformation;
import org.apache.flink.runtime.executiongraph.TaskInformation;
import org.apache.flink.runtime.filecache.FileCache;
import org.apache.flink.runtime.io.disk.iomanager.IOManager;
import org.apache.flink.runtime.io.network.NettyShuffleEnvironmentBuilder;
import org.apache.flink.runtime.io.network.TaskEventDispatcher;
import org.apache.flink.runtime.io.network.partition.NoOpResultPartitionConsumableNotifier;
import org.apache.flink.runtime.io.network.partition.ResultPartitionConsumableNotifier;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable;
import org.apache.flink.runtime.jobgraph.tasks.InputSplitProvider;
import org.apache.flink.runtime.memory.MemoryManager;
import org.apache.flink.runtime.metrics.groups.TaskMetricGroup;
import org.apache.flink.runtime.metrics.groups.UnregisteredMetricGroups;
import org.apache.flink.runtime.query.KvStateRegistry;
import org.apache.flink.runtime.shuffle.ShuffleEnvironment;
import org.apache.flink.runtime.state.TestTaskStateManager;
import org.apache.flink.runtime.taskexecutor.KvStateService;
import org.apache.flink.runtime.taskexecutor.PartitionProducerStateChecker;
import org.apache.flink.runtime.taskexecutor.TestGlobalAggregateManager;
import org.apache.flink.runtime.util.TestingTaskManagerRuntimeInfo;
import org.apache.flink.util.SerializedValue;
import org.apache.flink.util.TestLogger;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.util.Collections;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.concurrent.Future;
import static org.hamcrest.Matchers.isOneOf;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Testing asynchronous call of {@link Task}.
 */
public class TaskAsyncCallTest extends TestLogger {

    /** Number of expected checkpoints. */
    private static int numCalls;

    /** Triggered at the beginning of {@link CheckpointsInOrderInvokable#invoke()}. */
    private static OneShotLatch awaitLatch;

    /**
     * Triggered when {@link CheckpointsInOrderInvokable#triggerCheckpointAsync(CheckpointMetaData, CheckpointOptions, boolean)}
     * was called {@link #numCalls} times.
     */
    private static OneShotLatch triggerLatch;

    private ShuffleEnvironment<?, ?> shuffleEnvironment;

    @Before
    public void createQueuesAndActors() {
        // The latches are static because the invokable (instantiated by the
        // Task by class name) communicates with the test through them.
        numCalls = 1000;

        awaitLatch = new OneShotLatch();
        triggerLatch = new OneShotLatch();

        shuffleEnvironment = new NettyShuffleEnvironmentBuilder().build();
    }

    @After
    public void teardown() throws Exception {
        if (shuffleEnvironment != null) {
            shuffleEnvironment.close();
        }
    }

    // ------------------------------------------------------------------------
    //  Tests
    // ------------------------------------------------------------------------

    @Test
    public void testCheckpointCallsInOrder() throws Exception {
        Task task = createTask(CheckpointsInOrderInvokable.class);
        try (TaskCleaner ignored = new TaskCleaner(task)) {
            task.startTaskThread();

            // Wait until the invokable is actually running before triggering.
            awaitLatch.await();

            for (int i = 1; i <= numCalls; i++) {
                task.triggerCheckpointBarrier(i, 156865867234L, CheckpointOptions.forCheckpointWithDefaultLocation(), false);
            }

            // Released once the invokable observed all numCalls checkpoints in order.
            triggerLatch.await();

            assertFalse(task.isCanceledOrFailed());

            ExecutionState currentState = task.getExecutionState();
            assertThat(currentState, isOneOf(ExecutionState.RUNNING, ExecutionState.FINISHED));
        }
    }

    @Test
    public void testMixedAsyncCallsInOrder() throws Exception {
        Task task = createTask(CheckpointsInOrderInvokable.class);
        try (TaskCleaner ignored = new TaskCleaner(task)) {
            task.startTaskThread();

            awaitLatch.await();

            // Interleave checkpoint triggers with completion notifications.
            for (int i = 1; i <= numCalls; i++) {
                task.triggerCheckpointBarrier(i, 156865867234L, CheckpointOptions.forCheckpointWithDefaultLocation(), false);
                task.notifyCheckpointComplete(i);
            }

            triggerLatch.await();

            assertFalse(task.isCanceledOrFailed());

            ExecutionState currentState = task.getExecutionState();
            assertThat(currentState, isOneOf(ExecutionState.RUNNING, ExecutionState.FINISHED));
        }
    }

    /**
     * Builds a {@link Task} around the given invokable class, with all external
     * services mocked or replaced by no-op/test implementations.
     */
    private Task createTask(Class<? extends AbstractInvokable> invokableClass) throws Exception {
        BlobCacheService blobService =
            new BlobCacheService(mock(PermanentBlobCache.class), mock(TransientBlobCache.class));

        LibraryCacheManager libCache = mock(LibraryCacheManager.class);
        when(libCache.getClassLoader(any(JobID.class))).thenReturn(new TestUserCodeClassLoader());

        ResultPartitionConsumableNotifier consumableNotifier = new NoOpResultPartitionConsumableNotifier();
        PartitionProducerStateChecker partitionProducerStateChecker = mock(PartitionProducerStateChecker.class);
        Executor executor = mock(Executor.class);
        TaskMetricGroup taskMetricGroup = UnregisteredMetricGroups.createUnregisteredTaskMetricGroup();

        JobInformation jobInformation = new JobInformation(
            new JobID(),
            "Job Name",
            new SerializedValue<>(new ExecutionConfig()),
            new Configuration(),
            Collections.emptyList(),
            Collections.emptyList());

        TaskInformation taskInformation = new TaskInformation(
            new JobVertexID(),
            "Test Task",
            1,
            1,
            invokableClass.getName(),
            new Configuration());

        return new Task(
            jobInformation,
            taskInformation,
            new ExecutionAttemptID(),
            new AllocationID(),
            0,
            0,
            Collections.<ResultPartitionDeploymentDescriptor>emptyList(),
            Collections.<InputGateDeploymentDescriptor>emptyList(),
            0,
            mock(MemoryManager.class),
            mock(IOManager.class),
            shuffleEnvironment,
            new KvStateService(new KvStateRegistry(), null, null),
            mock(BroadcastVariableManager.class),
            new TaskEventDispatcher(),
            new TestTaskStateManager(),
            mock(TaskManagerActions.class),
            mock(InputSplitProvider.class),
            mock(CheckpointResponder.class),
            new NoOpTaskOperatorEventGateway(),
            new TestGlobalAggregateManager(),
            blobService,
            libCache,
            mock(FileCache.class),
            new TestingTaskManagerRuntimeInfo(),
            taskMetricGroup,
            consumableNotifier,
            partitionProducerStateChecker,
            executor);
    }

    /**
     * Invokable for testing checkpoints: records an error as soon as a
     * checkpoint call arrives out of order.
     */
    public static class CheckpointsInOrderInvokable extends AbstractInvokable {

        // Id of the last checkpoint seen; the next one must be exactly this + 1.
        private volatile long lastCheckpointId = 0;

        // Set on an out-of-order call; also used to wake up invoke().
        private volatile Exception error;

        public CheckpointsInOrderInvokable(Environment environment) {
            super(environment);
        }

        @Override
        public void invoke() throws Exception {
            awaitLatch.trigger();

            // wait forever (until canceled)
            synchronized (this) {
                while (error == null) {
                    wait();
                }
            }

            if (error != null) {
                // exit method prematurely due to error but make sure that the tests can finish
                triggerLatch.trigger();

                throw error;
            }
        }

        @Override
        public Future<Boolean> triggerCheckpointAsync(CheckpointMetaData checkpointMetaData, CheckpointOptions checkpointOptions, boolean advanceToEndOfEventTime) {
            lastCheckpointId++;
            if (checkpointMetaData.getCheckpointId() == lastCheckpointId) {
                if (lastCheckpointId == numCalls) {
                    triggerLatch.trigger();
                }
            }
            else if (this.error == null) {
                // Record the ordering violation and wake up invoke().
                this.error = new Exception("calls out of order");
                synchronized (this) {
                    notifyAll();
                }
            }
            return CompletableFuture.completedFuture(true);
        }

        @Override
        public void triggerCheckpointOnBarrier(CheckpointMetaData checkpointMetaData, CheckpointOptions checkpointOptions, CheckpointMetrics checkpointMetrics) throws Exception {
            throw new UnsupportedOperationException("Should not be called");
        }

        @Override
        public void abortCheckpointOnBarrier(long checkpointId, Throwable cause) {
            throw new UnsupportedOperationException("Should not be called");
        }

        @Override
        public Future<Void> notifyCheckpointCompleteAsync(long checkpointId) {
            // A completion notification must refer to the checkpoint triggered last.
            if (checkpointId != lastCheckpointId && this.error == null) {
                this.error = new Exception("calls out of order");
                synchronized (this) {
                    notifyAll();
                }
            }
            return CompletableFuture.completedFuture(null);
        }
    }

    /** A {@link ClassLoader} that delegates everything to {@link ClassLoader#getSystemClassLoader()}. */
    private static class TestUserCodeClassLoader extends ClassLoader {
        TestUserCodeClassLoader() {
            super(ClassLoader.getSystemClassLoader());
        }
    }

    /** Cancels the wrapped task on close and waits briefly for its thread to exit. */
    private static class TaskCleaner implements AutoCloseable {

        private final Task task;

        private TaskCleaner(Task task) {
            this.task = task;
        }

        @Override
        public void close() throws Exception {
            task.cancelExecution();
            task.getExecutingThread().join(5000);
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics;
import com.google.common.collect.Lists;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Order;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesBuilder;
import org.junit.Test;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.percentiles;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.sameInstance;
/**
*
*/
public class TDigestPercentilesIT extends AbstractNumericTestCase {
/**
 * Builds a sorted array of 1-20 random percents in [0, 100], biased so that
 * the exact boundaries 0 and 100 each appear with roughly 1-in-21 probability.
 */
private static double[] randomPercentiles() {
    final double[] percents = new double[randomIntBetween(1, 20)];
    for (int i = 0; i < percents.length; ++i) {
        final int choice = randomInt(20);
        if (choice == 0) {
            percents[i] = 0;
        } else if (choice == 1) {
            percents[i] = 100;
        } else {
            percents[i] = randomDouble() * 100;
        }
    }
    Arrays.sort(percents);
    Loggers.getLogger(TDigestPercentilesIT.class).info("Using percentiles={}", Arrays.toString(percents));
    return percents;
}
/**
 * Randomly applies a t-digest compression setting in (20, 121) to the builder,
 * leaving it at the default the other half of the time.
 */
private static PercentilesBuilder randomCompression(PercentilesBuilder builder) {
    if (!randomBoolean()) {
        return builder;
    }
    builder.compression(randomIntBetween(20, 120) + randomDouble());
    return builder;
}
/**
 * Asserts the returned percentiles line up one-to-one with the requested
 * percents, fall within [minValue, maxValue] (exact at 0 and 100), and are
 * monotonically non-decreasing.
 */
private void assertConsistent(double[] pcts, Percentiles percentiles, long minValue, long maxValue) {
    final List<Percentile> observed = Lists.newArrayList(percentiles);
    assertEquals(pcts.length, observed.size());
    for (int i = 0; i < pcts.length; ++i) {
        final Percentile entry = observed.get(i);
        assertThat(entry.getPercent(), equalTo(pcts[i]));
        final double value = entry.getValue();
        assertThat(value, greaterThanOrEqualTo((double) minValue));
        assertThat(value, lessThanOrEqualTo((double) maxValue));
        if (entry.getPercent() == 0) {
            assertThat(value, equalTo((double) minValue));
        }
        if (entry.getPercent() == 100) {
            assertThat(value, equalTo((double) maxValue));
        }
    }
    for (int i = 1; i < observed.size(); ++i) {
        assertThat(observed.get(i).getValue(), greaterThanOrEqualTo(observed.get(i - 1).getValue()));
    }
}
/**
 * An empty histogram bucket must yield NaN for every requested percentile
 * rather than fail. Uses uppercase {@code L} long-literal suffixes (the
 * lowercase {@code l} is easily confused with the digit 1).
 */
@Override
@Test
public void testEmptyAggregation() throws Exception {
    SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
            .setQuery(matchAllQuery())
            .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0)
                    .subAggregation(randomCompression(percentiles("percentiles"))
                            .percentiles(10, 15)))
            .execute().actionGet();

    assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));

    Histogram histo = searchResponse.getAggregations().get("histo");
    assertThat(histo, notNullValue());
    // The bucket at index 1 is expected to be empty, hence the NaN checks below.
    Histogram.Bucket bucket = histo.getBuckets().get(1);
    assertThat(bucket, notNullValue());

    Percentiles percentiles = bucket.getAggregations().get("percentiles");
    assertThat(percentiles, notNullValue());
    assertThat(percentiles.getName(), equalTo("percentiles"));
    assertThat(percentiles.percentile(10), equalTo(Double.NaN));
    assertThat(percentiles.percentile(15), equalTo(Double.NaN));
}
/**
 * Aggregating over an index where the field is unmapped must return NaN for
 * every requested percentile instead of failing. Uses an uppercase {@code L}
 * long-literal suffix instead of the confusable lowercase {@code l}.
 */
@Override
@Test
public void testUnmapped() throws Exception {
    SearchResponse searchResponse = client().prepareSearch("idx_unmapped")
            .setQuery(matchAllQuery())
            .addAggregation(randomCompression(percentiles("percentiles"))
                    .field("value")
                    .percentiles(0, 10, 15, 100))
            .execute().actionGet();

    // Nothing is mapped, so no document can contribute a value.
    assertThat(searchResponse.getHits().getTotalHits(), equalTo(0L));

    Percentiles percentiles = searchResponse.getAggregations().get("percentiles");
    assertThat(percentiles, notNullValue());
    assertThat(percentiles.getName(), equalTo("percentiles"));
    assertThat(percentiles.percentile(0), equalTo(Double.NaN));
    assertThat(percentiles.percentile(10), equalTo(Double.NaN));
    assertThat(percentiles.percentile(15), equalTo(Double.NaN));
    assertThat(percentiles.percentile(100), equalTo(Double.NaN));
}
/**
 * Percentiles over the single-valued "value" field must be consistent with
 * the known [minValue, maxValue] range.
 */
@Override
@Test
public void testSingleValuedField() throws Exception {
    final double[] percents = randomPercentiles();
    final SearchResponse response = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(randomCompression(percentiles("percentiles"))
                    .field("value")
                    .percentiles(percents))
            .get();

    assertHitCount(response, 10);

    final Percentiles percentiles = response.getAggregations().get("percentiles");
    assertConsistent(percents, percentiles, minValue, maxValue);
}
/**
 * The percentiles aggregation must be reachable both via the aggregation
 * tree and via {@code getProperty} on its parent, returning the same
 * instance. Uses an uppercase {@code L} long-literal suffix instead of the
 * confusable lowercase {@code l}.
 */
@Override
@Test
public void testSingleValuedField_getProperty() throws Exception {
    final double[] pcts = randomPercentiles();
    SearchResponse searchResponse = client()
            .prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(
                    global("global").subAggregation(randomCompression(percentiles("percentiles")).field("value").percentiles(pcts)))
            .execute().actionGet();

    assertHitCount(searchResponse, 10);

    Global global = searchResponse.getAggregations().get("global");
    assertThat(global, notNullValue());
    assertThat(global.getName(), equalTo("global"));
    // All 10 matching docs fall into the single global bucket.
    assertThat(global.getDocCount(), equalTo(10L));
    assertThat(global.getAggregations(), notNullValue());
    assertThat(global.getAggregations().asMap().size(), equalTo(1));

    Percentiles percentiles = global.getAggregations().get("percentiles");
    assertThat(percentiles, notNullValue());
    assertThat(percentiles.getName(), equalTo("percentiles"));
    // getProperty must expose the exact same aggregation instance.
    assertThat((Percentiles) global.getProperty("percentiles"), sameInstance(percentiles));
}
/**
 * Searching a mapped and an unmapped index together: only the mapped docs
 * contribute, so the result stays within [minValue, maxValue].
 */
@Override
@Test
public void testSingleValuedField_PartiallyUnmapped() throws Exception {
    final double[] percents = randomPercentiles();
    final SearchResponse response = client().prepareSearch("idx", "idx_unmapped")
            .setQuery(matchAllQuery())
            .addAggregation(randomCompression(percentiles("percentiles"))
                    .field("value")
                    .percentiles(percents))
            .get();

    assertHitCount(response, 10);

    final Percentiles percentiles = response.getAggregations().get("percentiles");
    assertConsistent(percents, percentiles, minValue, maxValue);
}
/**
 * A value script shifting every value down by one must shift the expected
 * percentile range by the same amount.
 */
@Override
@Test
public void testSingleValuedField_WithValueScript() throws Exception {
    final double[] percents = randomPercentiles();
    final SearchResponse response = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(randomCompression(percentiles("percentiles"))
                    .field("value").script(new Script("_value - 1"))
                    .percentiles(percents))
            .get();

    assertHitCount(response, 10);

    final Percentiles percentiles = response.getAggregations().get("percentiles");
    assertConsistent(percents, percentiles, minValue - 1, maxValue - 1);
}
/**
 * Same as the value-script test, but the shift amount comes in as the
 * script parameter "dec".
 */
@Override
@Test
public void testSingleValuedField_WithValueScript_WithParams() throws Exception {
    final Map<String, Object> scriptParams = new HashMap<>();
    scriptParams.put("dec", 1);
    final double[] percents = randomPercentiles();
    final SearchResponse response = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(randomCompression(percentiles("percentiles"))
                    .field("value")
                    .script(new Script("_value - dec", ScriptType.INLINE, null, scriptParams))
                    .percentiles(percents))
            .get();

    assertHitCount(response, 10);

    final Percentiles percentiles = response.getAggregations().get("percentiles");
    assertConsistent(percents, percentiles, minValue - 1, maxValue - 1);
}
/**
 * Percentiles over the multi-valued "values" field must stay within the
 * known [minValues, maxValues] range.
 */
@Override
@Test
public void testMultiValuedField() throws Exception {
    final double[] percents = randomPercentiles();
    final SearchResponse response = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(randomCompression(percentiles("percentiles"))
                    .field("values")
                    .percentiles(percents))
            .get();

    assertHitCount(response, 10);

    final Percentiles percentiles = response.getAggregations().get("percentiles");
    assertConsistent(percents, percentiles, minValues, maxValues);
}
/**
 * A value script shifting every multi-value down by one must shift the
 * expected range accordingly.
 */
@Override
@Test
public void testMultiValuedField_WithValueScript() throws Exception {
    final double[] percents = randomPercentiles();
    final SearchResponse response = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(randomCompression(percentiles("percentiles"))
                    .field("values").script(new Script("_value - 1"))
                    .percentiles(percents))
            .get();

    assertHitCount(response, 10);

    final Percentiles percentiles = response.getAggregations().get("percentiles");
    assertConsistent(percents, percentiles, minValues - 1, maxValues - 1);
}
@Test
public void testMultiValuedField_WithValueScript_Reverse() throws Exception {
    // Negating every value swaps and negates the min/max bounds.
    final double[] fractions = randomPercentiles();
    final SearchResponse response = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(
                    randomCompression(percentiles("percentiles"))
                            .field("values")
                            .script(new Script("_value * -1"))
                            .percentiles(fractions))
            .execute().actionGet();
    assertHitCount(response, 10);
    final Percentiles result = response.getAggregations().get("percentiles");
    assertConsistent(fractions, result, -maxValues, -minValues);
}
@Override
@Test
public void testMultiValuedField_WithValueScript_WithParams() throws Exception {
    // Parameterized variant of the multi-valued value-script test.
    final Map<String, Object> params = new HashMap<>();
    params.put("dec", 1);
    final double[] fractions = randomPercentiles();
    final SearchResponse response = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(
                    randomCompression(percentiles("percentiles"))
                            .field("values")
                            .script(new Script("_value - dec", ScriptType.INLINE, null, params))
                            .percentiles(fractions))
            .execute().actionGet();
    assertHitCount(response, 10);
    final Percentiles result = response.getAggregations().get("percentiles");
    assertConsistent(fractions, result, minValues - 1, maxValues - 1);
}
@Override
@Test
public void testScript_SingleValued() throws Exception {
    // Values produced purely by a document script instead of a mapped field.
    final double[] fractions = randomPercentiles();
    final SearchResponse response = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(
                    randomCompression(percentiles("percentiles"))
                            .script(new Script("doc['value'].value"))
                            .percentiles(fractions))
            .execute().actionGet();
    assertHitCount(response, 10);
    final Percentiles result = response.getAggregations().get("percentiles");
    assertConsistent(fractions, result, minValue, maxValue);
}
@Override
@Test
public void testScript_SingleValued_WithParams() throws Exception {
    // Document script with an externally supplied decrement parameter.
    final Map<String, Object> params = new HashMap<>();
    params.put("dec", 1);
    final double[] fractions = randomPercentiles();
    final SearchResponse response = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(
                    randomCompression(percentiles("percentiles"))
                            .script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params))
                            .percentiles(fractions))
            .execute().actionGet();
    assertHitCount(response, 10);
    final Percentiles result = response.getAggregations().get("percentiles");
    assertConsistent(fractions, result, minValue - 1, maxValue - 1);
}
@Override
@Test
public void testScript_ExplicitSingleValued_WithParams() throws Exception {
    // Mirrors testScript_SingleValued_WithParams; the base class distinguishes
    // the explicit single-valued case, the aggregation under test does not.
    final Map<String, Object> params = new HashMap<>();
    params.put("dec", 1);
    final double[] fractions = randomPercentiles();
    final SearchResponse response = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(
                    randomCompression(percentiles("percentiles"))
                            .script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params))
                            .percentiles(fractions))
            .execute().actionGet();
    assertHitCount(response, 10);
    final Percentiles result = response.getAggregations().get("percentiles");
    assertConsistent(fractions, result, minValue - 1, maxValue - 1);
}
@Override
@Test
public void testScript_MultiValued() throws Exception {
    // Script returning the whole multi-valued list per document.
    final double[] fractions = randomPercentiles();
    final SearchResponse response = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(
                    randomCompression(percentiles("percentiles"))
                            .script(new Script("doc['values'].values"))
                            .percentiles(fractions))
            .execute().actionGet();
    assertHitCount(response, 10);
    final Percentiles result = response.getAggregations().get("percentiles");
    assertConsistent(fractions, result, minValues, maxValues);
}
@Override
@Test
public void testScript_ExplicitMultiValued() throws Exception {
    // Identical to testScript_MultiValued; kept separate to satisfy the base-class contract.
    final double[] fractions = randomPercentiles();
    final SearchResponse response = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(
                    randomCompression(percentiles("percentiles"))
                            .script(new Script("doc['values'].values"))
                            .percentiles(fractions))
            .execute().actionGet();
    assertHitCount(response, 10);
    final Percentiles result = response.getAggregations().get("percentiles");
    assertConsistent(fractions, result, minValues, maxValues);
}
@Override
@Test
public void testScript_MultiValued_WithParams() throws Exception {
    // Script builds a fresh array with every value decremented by the "dec" parameter.
    final Map<String, Object> params = new HashMap<>();
    params.put("dec", 1);
    final double[] fractions = randomPercentiles();
    final SearchResponse response = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(
                    randomCompression(percentiles("percentiles"))
                            .script(new Script(
                                    "List values = doc['values'].values; double[] res = new double[values.size()]; for (int i = 0; i < res.length; i++) { res[i] = values.get(i) - dec; }; return res;",
                                    ScriptType.INLINE, null, params))
                            .percentiles(fractions))
            .execute().actionGet();
    assertHitCount(response, 10);
    final Percentiles result = response.getAggregations().get("percentiles");
    assertConsistent(fractions, result, minValues - 1, maxValues - 1);
}
@Test
public void testOrderBySubAggregation() {
    // Histogram buckets of width 2 over "value", ordered by each bucket's
    // 99th percentile, in a randomly chosen direction.
    boolean asc = randomBoolean();
    SearchResponse searchResponse = client().prepareSearch("idx")
            .setQuery(matchAllQuery())
            .addAggregation(
                    histogram("histo").field("value").interval(2L) // 2L: lowercase 'l' suffix reads as '1'
                            .subAggregation(randomCompression(percentiles("percentiles").percentiles(99)))
                            .order(Order.aggregation("percentiles", "99", asc)))
            .execute().actionGet();
    assertHitCount(searchResponse, 10);
    Histogram histo = searchResponse.getAggregations().get("histo");
    // Walk the buckets and verify the ordering key is monotone in the requested direction.
    double previous = asc ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY;
    for (Histogram.Bucket bucket : histo.getBuckets()) {
        Percentiles percentiles = bucket.getAggregations().get("percentiles");
        double p99 = percentiles.percentile(99);
        if (asc) {
            assertThat(p99, greaterThanOrEqualTo(previous));
        } else {
            assertThat(p99, lessThanOrEqualTo(previous));
        }
        previous = p99;
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.dbcp2.cpdsadapter;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.Properties;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import javax.sql.DataSource;
import org.apache.commons.dbcp2.datasources.SharedPoolDataSource;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
* Tests for DriverAdapterCPDS
*
* @version $Id: TestDriverAdapterCPDS.java 1692677 2015-07-26 01:23:38Z psteitz $
*/
public class TestDriverAdapterCPDS {
    /** Adapter under test; a fresh instance is configured for every test by {@link #setUp()}. */
    private DriverAdapterCPDS pcds;

    @Before
    public void setUp() throws Exception {
        pcds = new DriverAdapterCPDS();
        pcds.setDriver("org.apache.commons.dbcp2.TesterDriver");
        pcds.setUrl("jdbc:apache:commons:testdriver");
        pcds.setUser("foo");
        pcds.setPassword("bar");
        pcds.setPoolPreparedStatements(false);
    }

    /**
     * Verifies that a bad password is rejected and that a failed authentication
     * does not poison subsequent attempts with the correct password.
     *
     * JIRA: DBCP-245
     */
    @Test
    public void testIncorrectPassword() throws Exception
    {
        pcds.getPooledConnection("u2", "p2").close();
        try {
            // Use bad password
            pcds.getPooledConnection("u1", "zlsafjk");
            fail("Able to retrieve connection with incorrect password");
        } catch (SQLException expected) {
            // should fail
        }
        // Use good password
        pcds.getPooledConnection("u1", "p1").close();
        try {
            pcds.getPooledConnection("u1", "x");
            fail("Able to retrieve connection with incorrect password");
        }
        catch (SQLException e) {
            if (!e.getMessage().startsWith("x is not the correct password")) {
                throw e; // unexpected failure mode - surface it
            }
            // else the exception was expected
        }
        // Make sure we can still use our good password.
        pcds.getPooledConnection("u1", "p1").close();
    }

    /** Smoke test: default credentials, one statement, one row. */
    @Test
    public void testSimple() throws Exception {
        // try-with-resources ensures everything is released even when an assertion fails.
        try (Connection conn = pcds.getPooledConnection().getConnection()) {
            assertNotNull(conn);
            try (PreparedStatement stmt = conn.prepareStatement("select * from dual")) {
                assertNotNull(stmt);
                try (ResultSet rset = stmt.executeQuery()) {
                    assertNotNull(rset);
                    assertTrue(rset.next());
                }
            }
        }
    }

    /** Same smoke test, but authenticating with explicit credentials. */
    @Test
    public void testSimpleWithUsername() throws Exception {
        try (Connection conn = pcds.getPooledConnection("u1", "p1").getConnection()) {
            assertNotNull(conn);
            try (PreparedStatement stmt = conn.prepareStatement("select * from dual")) {
                assertNotNull(stmt);
                try (ResultSet rset = stmt.executeQuery()) {
                    assertNotNull(rset);
                    assertTrue(rset.next());
                }
            }
        }
    }

    /**
     * Exercises repeated borrow/close cycles of per-user connections to make
     * sure a closed connection can be replaced and closed again safely.
     */
    @Test
    public void testClosingWithUserName()
        throws Exception {
        Connection[] c = new Connection[10];
        for (int i = 0; i < c.length; i++) {
            c[i] = pcds.getPooledConnection("u1", "p1").getConnection();
        }
        // close one of the connections
        c[0].close();
        assertTrue(c[0].isClosed());
        // get a new connection to replace it
        c[0] = pcds.getPooledConnection("u1", "p1").getConnection();
        for (Connection element : c) {
            element.close();
        }
        // reopen all the connections, then close them again
        for (int i = 0; i < c.length; i++) {
            c[i] = pcds.getPooledConnection("u1", "p1").getConnection();
        }
        for (Connection element : c) {
            element.close();
        }
    }

    /**
     * Verifies the interaction between field values, connection properties
     * and explicit getPooledConnection(user, password) arguments.
     */
    @Test
    public void testSetProperties() throws Exception {
        // Set user property to bad value
        pcds.setUser("bad");
        // Supply correct value in connection properties
        // This will overwrite field value
        Properties properties = new Properties();
        properties.put("user", "foo");
        pcds.setConnectionProperties(properties);
        pcds.getPooledConnection().close();
        assertEquals("foo", pcds.getUser());
        // Put bad password into properties
        properties.put("password", "bad");
        // This does not change local field
        assertEquals("bar", pcds.getPassword());
        // Supply correct password in getPooledConnection
        // Call will succeed and overwrite property
        pcds.getPooledConnection("foo", "bar").close();
        assertEquals("bar", pcds.getConnectionProperties().getProperty("password"));
    }

    /**
     * Borrowing with testOnBorrow enabled but no validation query configured
     * must still succeed.
     *
     * JIRA: DBCP-442
     */
    @Test
    public void testNullValidationQuery() throws Exception {
        final SharedPoolDataSource spds = new SharedPoolDataSource();
        spds.setConnectionPoolDataSource(pcds);
        spds.setDefaultTestOnBorrow(true);
        final Connection c = spds.getConnection();
        c.close();
        spds.close();
    }

    // https://issues.apache.org/jira/browse/DBCP-376
    // NOTE(review): the link above says DBCP-376 while the method is named 367 - confirm the intended issue.
    @Test
    public void testDbcp367() throws Exception {
        ThreadDbcp367[] threads = new ThreadDbcp367[200];
        pcds.setPoolPreparedStatements(true);
        pcds.setMaxPreparedStatements(-1);
        pcds.setAccessToUnderlyingConnectionAllowed(true);
        SharedPoolDataSource spds = new SharedPoolDataSource();
        spds.setConnectionPoolDataSource(pcds);
        spds.setMaxTotal(threads.length + 10);
        spds.setDefaultMaxWaitMillis(-1);
        spds.setDefaultMaxIdle(10);
        spds.setDefaultAutoCommit(Boolean.FALSE);
        spds.setValidationQuery("SELECT 1");
        spds.setDefaultTimeBetweenEvictionRunsMillis(10000);
        spds.setDefaultNumTestsPerEvictionRun(-1);
        spds.setDefaultTestWhileIdle(true);
        spds.setDefaultTestOnBorrow(true);
        spds.setDefaultTestOnReturn(false);
        for (int i = 0; i < threads.length; i++) {
            threads[i] = new ThreadDbcp367(spds);
            threads[i].start();
        }
        for (int i = 0; i < threads.length; i++) {
            threads[i].join();
            Assert.assertFalse("Thread " + i + " has failed", threads[i].isFailed());
        }
        // Release the pool - the original leaked it (cf. testNullValidationQuery).
        spds.close();
    }

    /** Worker that hammers the shared pool with borrow/return cycles; see testDbcp367. */
    private static class ThreadDbcp367 extends Thread {
        private final DataSource ds;
        private volatile boolean failed = false;

        public ThreadDbcp367(DataSource ds) {
            this.ds = ds;
        }

        @Override
        public void run() {
            try {
                for (int j = 0; j < 5000; j++) {
                    // try-with-resources guarantees the connection is returned to the
                    // pool even if something throws between borrow and close.
                    try (Connection c = ds.getConnection()) {
                        // borrow/return only - no work needed
                    }
                }
            } catch (SQLException sqle) {
                failed = true;
                sqle.printStackTrace();
            }
        }

        public boolean isFailed() {
            return failed;
        }
    }
}
| |
package test;
import java.awt.BorderLayout;
import java.awt.Component;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JTree;
import javax.swing.border.LineBorder;
import javax.swing.event.TreeExpansionEvent;
import javax.swing.event.TreeWillExpandListener;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeCellRenderer;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.ExpandVetoException;
import javax.swing.tree.TreeCellRenderer;
import javax.swing.tree.TreeNode;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.SAXException;
import chav1961.purelib.basic.Utils;
import chav1961.purelib.enumerations.ContinueMode;
/**
 * Simple viewer for a {@code NavigationNode} model parsed from test.xml.
 * Tree children are materialized lazily, on node expansion.
 */
public class NodeLoader extends JFrame {
    private static final long serialVersionUID = -842439308945374954L;

    /** Tree used to browse the navigation model. */
    private final JTree navigator;

    /**
     * Builds the frame around the given navigation model root.
     *
     * @param rootCargo root of the model to display
     */
    public NodeLoader(final NavigationNode rootCargo) {
        // NOTE(review): jsp gets a left component but is never added to the frame - confirm it is needed at all.
        final JSplitPane jsp = new JSplitPane();
        final DefaultMutableTreeNode root = new FileTreeNode(rootCargo);

        this.navigator = new JTree(root);
        final DefaultTreeCellRenderer dtcr = (DefaultTreeCellRenderer) this.navigator.getCellRenderer();
        this.navigator.addTreeWillExpandListener(new TreeWillExpandListener() {
            @Override
            public void treeWillExpand(final TreeExpansionEvent event) throws ExpandVetoException {
                final FileTreeNode node = (FileTreeNode) event.getPath().getLastPathComponent();
                final NavigationNode nn = (NavigationNode) node.getUserObject();

                // Fix: rebuild the children from scratch on every expansion. The original
                // appended them unconditionally, so collapsing and re-expanding a node
                // duplicated its entire subtree each time.
                node.removeAllChildren();
                if (nn instanceof PackageNode) {
                    for (NavigationNode item : ((PackageNode) nn).children) {
                        node.add(new FileTreeNode(item));
                    }
                }
                else if (nn instanceof ClassNode) {
                    if (((ClassNode) nn).fields.length > 0) {
                        for (NavigationNode item : ((ClassNode) nn).fields) {
                            node.add(new FileTreeNode(item));
                        }
                    }
                    if (((ClassNode) nn).methods.length > 0) {
                        for (NavigationNode item : ((ClassNode) nn).methods) {
                            node.add(new FileTreeNode(item));
                        }
                    }
                    if (((ClassNode) nn).constructors.length > 0) {
                        for (NavigationNode item : ((ClassNode) nn).constructors) {
                            node.add(new FileTreeNode(item));
                        }
                    }
                }
                else if (nn instanceof FieldNode) {
                    // Fields are leaves - nothing to add.
                }
                else if (nn instanceof MethodNode) {
                    if (((MethodNode) nn).parmRef.length > 0) {
                        for (NavigationNode item : ((MethodNode) nn).parmRef) {
                            node.add(new FileTreeNode(item));
                        }
                    }
                }
                else if (nn instanceof RootNode) {
                    node.add(new FileTreeNode(((RootNode) nn).child));
                }
                // Tell the model the node's child list changed so the view refreshes.
                ((DefaultTreeModel) navigator.getModel()).nodeStructureChanged(node);
            }

            @Override
            public void treeWillCollapse(TreeExpansionEvent event) throws ExpandVetoException {
                // Nothing to do: children are rebuilt on the next expansion.
            }
        });
        this.navigator.setCellRenderer(new TreeCellRenderer() {
            @Override
            public Component getTreeCellRendererComponent(JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) {
                // NOTE(review): allocating a new JLabel per cell works but is wasteful;
                // Swing renderers are normally reused.
                final JLabel label = new JLabel();

                if (selected) {
                    label.setOpaque(true);
                    label.setBackground(dtcr.getBackgroundSelectionColor());
                    label.setForeground(dtcr.getTextSelectionColor());
                }
                if (hasFocus) {
                    label.setBorder(new LineBorder(dtcr.getBorderSelectionColor()));
                }
                final NavigationNode nn = (NavigationNode) ((FileTreeNode) value).getUserObject();

                if (nn instanceof TrivialNode) {
                    label.setText(((TrivialNode) nn).name);
                }
                else {
                    label.setText("ROOT");
                }
                return label;
            }
        });
        jsp.setLeftComponent(this.navigator);
        getContentPane().add(new JScrollPane(this.navigator), BorderLayout.CENTER);
    }

    /**
     * Parses test.xml into a NavigationNode tree and shows it.
     * The walker keeps a stack in {@code list}: ENTER pushes a node for the
     * element, EXIT pops it and attaches it to the new stack top.
     */
    public static void main(String[] args) throws ParserConfigurationException, SAXException, IOException {
        final DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        final DocumentBuilder builder = factory.newDocumentBuilder();
        final Document doc = builder.parse(NodeLoader.class.getResourceAsStream("test.xml"));
        final Element root = doc.getDocumentElement();
        final List<NavigationNode> list = new ArrayList<>();
        final NavigationNode[] rootNode = new NavigationNode[1];
        final String[] overview = new String[1];

        Utils.walkDownXML(root, (mode, node) -> {
            NavigationNode nn;

            switch (mode) {
                case ENTER :
                    // Push a node for the element onto the head of the stack.
                    switch (node.getTagName()) {
                        case "navigation" :
                            list.add(0, new RootNode());
                            break;
                        case "package" :
                            final PackageNode pn = new PackageNode();
                            pn.name = node.getAttribute("name");
                            list.add(0, pn);
                            break;
                        case "class" :
                            final ClassNode cn = new ClassNode();
                            cn.name = node.getAttribute("name");
                            list.add(0, cn);
                            break;
                        case "field" :
                            final FieldNode fn = new FieldNode();
                            fn.name = node.getAttribute("name");
                            list.add(0, fn);
                            break;
                        case "method" :
                            final MethodNode mn = new MethodNode();
                            mn.name = node.getAttribute("name");
                            list.add(0, mn);
                            break;
                        case "parameter" :
                            final FieldNode parmn = new FieldNode();
                            parmn.name = node.getAttribute("name");
                            list.add(0, parmn);
                            break;
                        case "overview" :
                            overview[0] = node.getTextContent();
                            break;
                        default :
                    }
                    break;
                case EXIT :
                    // Pop the finished node and attach it to its parent (new stack top).
                    switch (node.getTagName()) {
                        case "navigation" :
                            rootNode[0] = list.remove(0);
                            break;
                        case "package" :
                            nn = list.remove(0);
                            if (list.get(0) instanceof PackageNode) {
                                ((PackageNode)list.get(0)).children = Arrays.copyOf(((PackageNode)list.get(0)).children,((PackageNode)list.get(0)).children.length+1);
                                ((PackageNode)list.get(0)).children[((PackageNode)list.get(0)).children.length-1] = nn;
                            }
                            else if (list.get(0) instanceof RootNode) {
                                ((RootNode)list.get(0)).child = nn;
                            }
                            break;
                        case "class" :
                            nn = list.remove(0);
                            if (list.get(0) instanceof PackageNode) {
                                ((PackageNode)list.get(0)).children = Arrays.copyOf(((PackageNode)list.get(0)).children,((PackageNode)list.get(0)).children.length+1);
                                ((PackageNode)list.get(0)).children[((PackageNode)list.get(0)).children.length-1] = nn;
                            }
                            break;
                        case "field" :
                            nn = list.remove(0);
                            if (list.get(0) instanceof ClassNode) {
                                ((ClassNode)list.get(0)).fields = Arrays.copyOf(((ClassNode)list.get(0)).fields,((ClassNode)list.get(0)).fields.length+1);
                                ((ClassNode)list.get(0)).fields[((ClassNode)list.get(0)).fields.length-1] = nn;
                            }
                            break;
                        case "method" :
                            nn = list.remove(0);
                            if (list.get(0) instanceof ClassNode) {
                                ((ClassNode)list.get(0)).methods = Arrays.copyOf(((ClassNode)list.get(0)).methods,((ClassNode)list.get(0)).methods.length+1);
                                ((ClassNode)list.get(0)).methods[((ClassNode)list.get(0)).methods.length-1] = nn;
                            }
                            break;
                        case "parameter" :
                            nn = list.remove(0);
                            if (list.get(0) instanceof MethodNode) {
                                ((MethodNode)list.get(0)).parmRef = Arrays.copyOf(((MethodNode)list.get(0)).parmRef,((MethodNode)list.get(0)).parmRef.length+1);
                                ((MethodNode)list.get(0)).parmRef[((MethodNode)list.get(0)).parmRef.length-1] = nn;
                            }
                            break;
                        case "overview" :
                            ((RootNode)list.get(0)).overview = overview[0];
                            break;
                        default :
                    }
                    break;
                default:
                    break;
            }
            return ContinueMode.CONTINUE;
        });
        // NOTE(review): no size or default-close operation is set on the frame - confirm intended.
        new NodeLoader(rootNode[0]).setVisible(true);
    }

    /** Debug helper: dumps a navigation subtree to stderr with indentation. */
    static void printTree(final String prefix, final NavigationNode node) {
        if (node instanceof PackageNode) {
            System.err.print(prefix);
            System.err.println("package "+((PackageNode)node).name);
            for (NavigationNode item : ((PackageNode)node).children) {
                printTree(prefix+"\t",item);
            }
        }
        else if (node instanceof ClassNode) {
            System.err.print(prefix);
            System.err.println("Class "+((ClassNode)node).name);
            if (((ClassNode)node).fields.length > 0) {
                System.err.println(prefix+"- fields:");
                for (NavigationNode item : ((ClassNode)node).fields) {
                    printTree(prefix+"\t",item);
                }
            }
            if (((ClassNode)node).methods.length > 0) {
                System.err.println(prefix+"- methods:");
                for (NavigationNode item : ((ClassNode)node).methods) {
                    printTree(prefix+"\t",item);
                }
            }
            if (((ClassNode)node).constructors.length > 0) {
                System.err.println(prefix+"- constructors:");
                for (NavigationNode item : ((ClassNode)node).constructors) {
                    printTree(prefix+"\t",item);
                }
            }
        }
        else if (node instanceof FieldNode) {
            System.err.print(prefix);
            System.err.println("field "+((FieldNode)node).name);
        }
        else if (node instanceof MethodNode) {
            System.err.print(prefix);
            System.err.println("Method "+((MethodNode)node).name);
            if (((MethodNode)node).parmRef.length > 0) {
                System.err.println(prefix+"- parameters:");
                for (NavigationNode item : ((MethodNode)node).parmRef) {
                    printTree(prefix+"\t",item);
                }
            }
        }
        else if (node instanceof RootNode) {
            System.err.print(prefix);
            System.err.println("navigation");
            printTree(prefix+"\t",((RootNode)node).child);
        }
    }

    /** Tree node wrapper; only FieldNode cargo counts as a leaf. */
    public static class FileTreeNode extends DefaultMutableTreeNode {
        private static final long serialVersionUID = -6014820236428705486L;

        public FileTreeNode(final NavigationNode current) {
            super(current);
        }

        @Override
        public boolean isLeaf() {
            return (getUserObject() instanceof FieldNode);
        }
    }
}
| |
package org.oakcoin;
import com.google.common.io.BaseEncoding;
import java.util.Arrays;
import java.math.BigInteger;
import javax.xml.bind.DatatypeConverter;
import static org.oakcoin.NativeSecp256k1Util.*;
/**
* This class holds test cases defined for testing this library.
*/
public class NativeSecp256k1Test {
    //TODO improve comments/add more tests
    /**
     * This tests verify() for a valid signature
     */
    public static void testVerifyPos() throws AssertFailException{
        boolean result = false;
        byte[] data = BaseEncoding.base16().lowerCase().decode("CF80CD8AED482D5D1527D7DC72FCEFF84E6326592848447D2DC0B0E87DFC9A90".toLowerCase()); //sha256hash of "testing"
        byte[] sig = BaseEncoding.base16().lowerCase().decode("3044022079BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F817980220294F14E883B3F525B5367756C2A11EF6CF84B730B36C17CB0C56F0AAB2C98589".toLowerCase());
        byte[] pub = BaseEncoding.base16().lowerCase().decode("040A629506E1B65CD9D2E0BA9C75DF9C4FED0DB16DC9625ED14397F0AFC836FAE595DC53F8B0EFE61E703075BD9B143BAC75EC0E19F82A2208CAEB32BE53414C40".toLowerCase());
        result = NativeSecp256k1.verify( data, sig, pub);
        assertEquals( result, true , "testVerifyPos");
    }
    /**
     * This tests verify() for a non-valid signature
     * (same signature as testVerifyPos, but the hashed message differs in its last byte)
     */
    public static void testVerifyNeg() throws AssertFailException{
        boolean result = false;
        byte[] data = BaseEncoding.base16().lowerCase().decode("CF80CD8AED482D5D1527D7DC72FCEFF84E6326592848447D2DC0B0E87DFC9A91".toLowerCase()); //sha256hash of "testing"
        byte[] sig = BaseEncoding.base16().lowerCase().decode("3044022079BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F817980220294F14E883B3F525B5367756C2A11EF6CF84B730B36C17CB0C56F0AAB2C98589".toLowerCase());
        byte[] pub = BaseEncoding.base16().lowerCase().decode("040A629506E1B65CD9D2E0BA9C75DF9C4FED0DB16DC9625ED14397F0AFC836FAE595DC53F8B0EFE61E703075BD9B143BAC75EC0E19F82A2208CAEB32BE53414C40".toLowerCase());
        result = NativeSecp256k1.verify( data, sig, pub);
        //System.out.println(" TEST " + new BigInteger(1, resultbytes).toString(16));
        assertEquals( result, false , "testVerifyNeg");
    }
    /**
     * This tests secret key verify() for a valid secretkey
     */
    public static void testSecKeyVerifyPos() throws AssertFailException{
        boolean result = false;
        byte[] sec = BaseEncoding.base16().lowerCase().decode("67E56582298859DDAE725F972992A07C6C4FB9F62A8FFF58CE3CA926A1063530".toLowerCase());
        result = NativeSecp256k1.secKeyVerify( sec );
        //System.out.println(" TEST " + new BigInteger(1, resultbytes).toString(16));
        assertEquals( result, true , "testSecKeyVerifyPos");
    }
    /**
     * This tests secret key verify() for a invalid secretkey
     * (all-FF is >= the curve order, hence invalid)
     */
    public static void testSecKeyVerifyNeg() throws AssertFailException{
        boolean result = false;
        byte[] sec = BaseEncoding.base16().lowerCase().decode("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF".toLowerCase());
        result = NativeSecp256k1.secKeyVerify( sec );
        //System.out.println(" TEST " + new BigInteger(1, resultbytes).toString(16));
        assertEquals( result, false , "testSecKeyVerifyNeg");
    }
    /**
     * This tests public key create() for a valid secretkey
     */
    public static void testPubKeyCreatePos() throws AssertFailException{
        byte[] sec = BaseEncoding.base16().lowerCase().decode("67E56582298859DDAE725F972992A07C6C4FB9F62A8FFF58CE3CA926A1063530".toLowerCase());
        byte[] resultArr = NativeSecp256k1.computePubkey( sec);
        String pubkeyString = javax.xml.bind.DatatypeConverter.printHexBinary(resultArr);
        assertEquals( pubkeyString , "04C591A8FF19AC9C4E4E5793673B83123437E975285E7B442F4EE2654DFFCA5E2D2103ED494718C697AC9AEBCFD19612E224DB46661011863ED2FC54E71861E2A6" , "testPubKeyCreatePos");
    }
    /**
     * This tests public key create() for a invalid secretkey
     * (expects an empty result rather than an exception)
     */
    public static void testPubKeyCreateNeg() throws AssertFailException{
        byte[] sec = BaseEncoding.base16().lowerCase().decode("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF".toLowerCase());
        byte[] resultArr = NativeSecp256k1.computePubkey( sec);
        String pubkeyString = javax.xml.bind.DatatypeConverter.printHexBinary(resultArr);
        assertEquals( pubkeyString, "" , "testPubKeyCreateNeg");
    }
    /**
     * This tests sign() for a valid secretkey
     */
    public static void testSignPos() throws AssertFailException{
        byte[] data = BaseEncoding.base16().lowerCase().decode("CF80CD8AED482D5D1527D7DC72FCEFF84E6326592848447D2DC0B0E87DFC9A90".toLowerCase()); //sha256hash of "testing"
        byte[] sec = BaseEncoding.base16().lowerCase().decode("67E56582298859DDAE725F972992A07C6C4FB9F62A8FFF58CE3CA926A1063530".toLowerCase());
        byte[] resultArr = NativeSecp256k1.sign(data, sec);
        String sigString = javax.xml.bind.DatatypeConverter.printHexBinary(resultArr);
        assertEquals( sigString, "30440220182A108E1448DC8F1FB467D06A0F3BB8EA0533584CB954EF8DA112F1D60E39A202201C66F36DA211C087F3AF88B50EDF4F9BDAA6CF5FD6817E74DCA34DB12390C6E9" , "testSignPos");
    }
    /**
     * This tests sign() for a invalid secretkey
     * (expects an empty result rather than an exception)
     */
    public static void testSignNeg() throws AssertFailException{
        byte[] data = BaseEncoding.base16().lowerCase().decode("CF80CD8AED482D5D1527D7DC72FCEFF84E6326592848447D2DC0B0E87DFC9A90".toLowerCase()); //sha256hash of "testing"
        byte[] sec = BaseEncoding.base16().lowerCase().decode("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF".toLowerCase());
        byte[] resultArr = NativeSecp256k1.sign(data, sec);
        String sigString = javax.xml.bind.DatatypeConverter.printHexBinary(resultArr);
        assertEquals( sigString, "" , "testSignNeg");
    }
    /**
     * This tests private key tweak-add
     */
    public static void testPrivKeyTweakAdd_1() throws AssertFailException {
        byte[] sec = BaseEncoding.base16().lowerCase().decode("67E56582298859DDAE725F972992A07C6C4FB9F62A8FFF58CE3CA926A1063530".toLowerCase());
        byte[] data = BaseEncoding.base16().lowerCase().decode("3982F19BEF1615BCCFBB05E321C10E1D4CBA3DF0E841C2E41EEB6016347653C3".toLowerCase()); //sha256hash of "tweak"
        byte[] resultArr = NativeSecp256k1.privKeyTweakAdd( sec , data );
        String sigString = javax.xml.bind.DatatypeConverter.printHexBinary(resultArr);
        assertEquals( sigString , "A168571E189E6F9A7E2D657A4B53AE99B909F7E712D1C23CED28093CD57C88F3" , "testPrivKeyAdd_1");
    }
    /**
     * This tests private key tweak-mul
     */
    public static void testPrivKeyTweakMul_1() throws AssertFailException {
        byte[] sec = BaseEncoding.base16().lowerCase().decode("67E56582298859DDAE725F972992A07C6C4FB9F62A8FFF58CE3CA926A1063530".toLowerCase());
        byte[] data = BaseEncoding.base16().lowerCase().decode("3982F19BEF1615BCCFBB05E321C10E1D4CBA3DF0E841C2E41EEB6016347653C3".toLowerCase()); //sha256hash of "tweak"
        byte[] resultArr = NativeSecp256k1.privKeyTweakMul( sec , data );
        String sigString = javax.xml.bind.DatatypeConverter.printHexBinary(resultArr);
        assertEquals( sigString , "97F8184235F101550F3C71C927507651BD3F1CDB4A5A33B8986ACF0DEE20FFFC" , "testPrivKeyMul_1");
    }
    /**
     * This tests PUBLIC key tweak-add on an uncompressed key.
     * NOTE(review): the method (and assertion label) say "PrivKey" but the code
     * exercises pubKeyTweakAdd - names kept for compatibility; confirm intent.
     */
    public static void testPrivKeyTweakAdd_2() throws AssertFailException {
        byte[] pub = BaseEncoding.base16().lowerCase().decode("040A629506E1B65CD9D2E0BA9C75DF9C4FED0DB16DC9625ED14397F0AFC836FAE595DC53F8B0EFE61E703075BD9B143BAC75EC0E19F82A2208CAEB32BE53414C40".toLowerCase());
        byte[] data = BaseEncoding.base16().lowerCase().decode("3982F19BEF1615BCCFBB05E321C10E1D4CBA3DF0E841C2E41EEB6016347653C3".toLowerCase()); //sha256hash of "tweak"
        byte[] resultArr = NativeSecp256k1.pubKeyTweakAdd( pub , data );
        String sigString = javax.xml.bind.DatatypeConverter.printHexBinary(resultArr);
        assertEquals( sigString , "0411C6790F4B663CCE607BAAE08C43557EDC1A4D11D88DFCB3D841D0C6A941AF525A268E2A863C148555C48FB5FBA368E88718A46E205FABC3DBA2CCFFAB0796EF" , "testPrivKeyAdd_2");
    }
    /**
     * This tests PUBLIC key tweak-mul on an uncompressed key.
     * NOTE(review): the method (and assertion label) say "PrivKey" but the code
     * exercises pubKeyTweakMul - names kept for compatibility; confirm intent.
     */
    public static void testPrivKeyTweakMul_2() throws AssertFailException {
        byte[] pub = BaseEncoding.base16().lowerCase().decode("040A629506E1B65CD9D2E0BA9C75DF9C4FED0DB16DC9625ED14397F0AFC836FAE595DC53F8B0EFE61E703075BD9B143BAC75EC0E19F82A2208CAEB32BE53414C40".toLowerCase());
        byte[] data = BaseEncoding.base16().lowerCase().decode("3982F19BEF1615BCCFBB05E321C10E1D4CBA3DF0E841C2E41EEB6016347653C3".toLowerCase()); //sha256hash of "tweak"
        byte[] resultArr = NativeSecp256k1.pubKeyTweakMul( pub , data );
        String sigString = javax.xml.bind.DatatypeConverter.printHexBinary(resultArr);
        assertEquals( sigString , "04E0FE6FE55EBCA626B98A807F6CAF654139E14E5E3698F01A9A658E21DC1D2791EC060D4F412A794D5370F672BC94B722640B5F76914151CFCA6E712CA48CC589" , "testPrivKeyMul_2");
    }
    /**
     * This tests seed randomization
     */
    public static void testRandomize() throws AssertFailException {
        byte[] seed = BaseEncoding.base16().lowerCase().decode("A441B15FE9A3CF56661190A0B93B9DEC7D04127288CC87250967CF3B52894D11".toLowerCase()); //sha256hash of "random"
        boolean result = NativeSecp256k1.randomize(seed);
        assertEquals( result, true, "testRandomize");
    }
    /**
     * This tests ECDH secret creation from a secret key and an uncompressed public key
     */
    public static void testCreateECDHSecret() throws AssertFailException{
        byte[] sec = BaseEncoding.base16().lowerCase().decode("67E56582298859DDAE725F972992A07C6C4FB9F62A8FFF58CE3CA926A1063530".toLowerCase());
        byte[] pub = BaseEncoding.base16().lowerCase().decode("040A629506E1B65CD9D2E0BA9C75DF9C4FED0DB16DC9625ED14397F0AFC836FAE595DC53F8B0EFE61E703075BD9B143BAC75EC0E19F82A2208CAEB32BE53414C40".toLowerCase());
        byte[] resultArr = NativeSecp256k1.createECDHSecret(sec, pub);
        String ecdhString = javax.xml.bind.DatatypeConverter.printHexBinary(resultArr);
        assertEquals( ecdhString, "2A2A67007A926E6594AF3EB564FC74005B37A9C8AEF2033C4552051B5C87F043" , "testCreateECDHSecret");
    }
    /**
     * Runs every test above in order; requires the native library to be loaded.
     */
    public static void main(String[] args) throws AssertFailException{
        System.out.println("\n libsecp256k1 enabled: " + Secp256k1Context.isEnabled() + "\n");
        assertEquals( Secp256k1Context.isEnabled(), true, "isEnabled" );
        //Test verify() success/fail
        testVerifyPos();
        testVerifyNeg();
        //Test secKeyVerify() success/fail
        testSecKeyVerifyPos();
        testSecKeyVerifyNeg();
        //Test computePubkey() success/fail
        testPubKeyCreatePos();
        testPubKeyCreateNeg();
        //Test sign() success/fail
        testSignPos();
        testSignNeg();
        //Test privKeyTweakAdd()
        testPrivKeyTweakAdd_1();
        //Test privKeyTweakMul()
        testPrivKeyTweakMul_1();
        //Test pubKeyTweakAdd() (method named testPrivKeyTweakAdd_2)
        testPrivKeyTweakAdd_2();
        //Test pubKeyTweakMul() (method named testPrivKeyTweakMul_2)
        testPrivKeyTweakMul_2();
        //Test randomize()
        testRandomize();
        //Test ECDH
        testCreateECDHSecret();
        NativeSecp256k1.cleanup();
        System.out.println(" All tests passed." );
    }
}
| |
package com.nus;
import Jama.Matrix;
/**
* Created by duy on 20/1/15.
*/
public class LmSolver {
// Configuration parameters for the Levenberg-Marquardt algorithm
private double dampingFactor;   // lambda blending gradient-descent and Gauss-Newton steps
private int maxNumIter;         // hard cap on the number of iterations
private double gradientEpsilon; // presumably a convergence threshold on the gradient - confirm in the solver loop
private double changeEpsilon;   // presumably a convergence threshold on the parameter update - confirm in the solver loop
private LmModelError errorFunc; // model error function being minimized
public LmSolver(LmModelError inErrorFunc) {
this.dampingFactor = 0.001;
this.maxNumIter = 10;
this.gradientEpsilon = 1e-8;
this.changeEpsilon = 1e-8;
this.errorFunc = inErrorFunc;
}
public LmSolver(
LmModelError errorFunc,
double damping,
int maxNumIter,
double gradientEpsilon,
double changeEpsilon) {
this.dampingFactor = damping;
this.maxNumIter = maxNumIter;
this.gradientEpsilon = gradientEpsilon;
this.changeEpsilon = changeEpsilon;
this.errorFunc = errorFunc;
}
public double getDampingFactor() {
return dampingFactor;
}
public void setDampingFactor(double dampingFactor) {
this.dampingFactor = dampingFactor;
}
public int getMaxNumIter() {
return maxNumIter;
}
public LmModelError getErrorFunc() {
return errorFunc;
}
public void setMaxNumIter(int maxNumIter) {
this.maxNumIter = maxNumIter;
}
public double getGradientEpsilon() {
return gradientEpsilon;
}
public void setGradientEpsilon(double gradientEpsilon) {
this.gradientEpsilon = gradientEpsilon;
}
public double getChangeEpsilon() {
return changeEpsilon;
}
public void setChangeEpsilon(double changeEpsilon) {
this.changeEpsilon = changeEpsilon;
}
/**
* Applies Levenberg-Marquadt algorithm on the input error function with the
* input initial guess of optimization parameters
*
* @param optParams A vector of initial guess of values of parameters
* for optimization
* @param paramHandler A handler which is called to adjust values of
* the Levenberg-Marquadt parameters after they are
* updated at the end of each iteration in the algorithm.
* If {@code paramHandler} is null, no further adjustment
* to the updated parameters is performed. This is useful
* when Levenberg-Marquadt algorithm is performed on
* structures such as quaternions. Note that the
* way updated parameters are modified can affect
* correctness of the Levenberg-Marquadt algorithm
* @param approxHessianFlg A boolean flag to indicate whether the Hessian
* matrix used in the Levenberg-Marquadt algorithm
* should be approximated or computed exactly. If
* {@code true}, the Hessian matrix will be
* approximated by the Jacobian matrix
*/
public void solve(
double[] optParams,
LmParamHandler paramHandler,
boolean approxHessianFlg) {
int iter = 0;
int numOptParams = optParams.length;
double penaltyFactor = 2.0;
double lambda = 0.0;
double errValue = errorFunc.eval(optParams);
while (iter < maxNumIter) {
iter++;
// Compute gradient vector
double[] gradient = errorFunc.jacobian(optParams);
Matrix gradientMat = new Matrix(gradient, numOptParams);
if (gradientMat.normInf() < gradientEpsilon) {
break;
}
// Compute modified Hessian matrix
double[][] modifiedHessian = errorFunc.hessian(
optParams, approxHessianFlg);
if (iter == 1) {
// Initialize damping value on the first iteration
double diagonalMax = modifiedHessian[0][0];
for (int i = 1; i < modifiedHessian.length; ++i) {
diagonalMax = Math.max(diagonalMax, modifiedHessian[i][i]);
}
lambda = dampingFactor * diagonalMax;
}
for (int i = 0; i < numOptParams; ++i) {
modifiedHessian[i][i] += lambda;
}
Matrix modifiedHessianMat = new Matrix(modifiedHessian);
// Solve augmented normal equation
Matrix direction = JamaHelper.solvePSDMatrixEq(
modifiedHessianMat,
gradientMat.uminus()
);
if (direction == null) {
// Modified Hessian matrix is not positive definite
lambda *= penaltyFactor;
penaltyFactor *= 2.0;
continue;
}
// Stop if the change in optimized parameter vectors is negligible
Matrix paramVector = new Matrix(optParams, numOptParams);
if (direction.normF() <
changeEpsilon * (paramVector.normF() + changeEpsilon)) {
break;
}
double[] newOptParams = paramVector.plus(direction).getRowPackedCopy();
// Compute gain ratio between actual and predicted gain
double newErrValue = errorFunc.eval(newOptParams);
double predictedGain = 0.5 * (
lambda * JamaHelper.dotProduct(direction, direction) -
JamaHelper.dotProduct(gradientMat, direction)
);
double gainRatio = (errValue - newErrValue) / predictedGain;
// Update optimized parameter vector and
// damping value in augmented normal equation
if (gainRatio > 0) {
for (int i = 0; i < numOptParams; ++i) {
optParams[i] = newOptParams[i];
}
errValue = newErrValue;
penaltyFactor = 2.0;
lambda *= Math.max(1.0 / 3.0, 1 - Math.pow(2.0 * gainRatio - 1, 3));
} else {
lambda *= penaltyFactor;
penaltyFactor *= 2.0;
}
// Adjust updated values of the parameters
if (paramHandler != null) {
paramHandler.adjust(optParams);
}
}
}
/**
* Applies Levenberg-Marquadt algorithm on the input error function with the
* input initial guess of optimization parameters. Note that the Hessian
* matrix used in the Levenberg-Marquadt will be computed exactly
*
* @param optParams A vector of initial guess of values of parameters
* for optimization
*/
public void solve(double[] optParams) {
this.solve(optParams, null, false);
}
/**
* Applies Levenberg-Marquadt algorithm on the input error function with the
* input initial guess of optimization parameters.
*
* @param optParams A vector of initial guess of values of parameters
* for optimization
* @param approxHessianFlg A boolean flag to indicate whether the Hessian
* matrix used in the Levenberg-Marquadt algorithm
* should be approximated or computed exactly. If
* {@code true}, the Hessian matrix will be
* approximated by the Jacobian matrix
*/
public void solve(double[] optParams, boolean approxHessianFlg) {
this.solve(optParams, null, approxHessianFlg);
}
/**
* Applies Levenberg-Marquadt algorithm on the input error function with the
* input initial guess of optimization parameters. Note that the Hessian
* matrix used in the Levenberg-Marquadt will be computed exactly
*
* @param optParams A vector of initial guess of values of parameters
* for optimization
* @param paramHandler A handler which is called to adjust values of
* the Levenberg-Marquadt parameters after they are
* updated at the end of each iteration in the algorithm.
* If {@code paramHandler} is null, no further adjustment
* to the updated parameters is performed. This is useful
* when Levenberg-Marquadt algorithm is performed on
* structures such as quaternions. Note that the
* way updated parameters are modified can affect
* correctness of the Levenberg-Marquadt algorithm
*/
public void solve(double[] optParams, LmParamHandler paramHandler) {
this.solve(optParams, paramHandler, false);
}
private static Matrix approximateHessian(Matrix jacobian) {
return jacobian.transpose().times(jacobian);
}
}
| |
/**
* Copyright (C) 2014-2018 LinkedIn Corp. (pinot-core@linkedin.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.pinot.core.segment.memory;
import com.google.common.annotations.VisibleForTesting;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.WeakHashMap;
import java.util.concurrent.atomic.AtomicLong;
import javax.annotation.Nullable;
import javax.annotation.concurrent.ThreadSafe;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The class <code>PinotDataBuffer</code> is the byte buffer for Pinot data that resides in off-heap memory.
*
* <p>The byte buffer may be memory mapped (MMAP) or direct allocated (DIRECT).
* <p>Supports buffers larger than 2GB.
* <p>This class will also track the number and memory usage of the buffers.
* <p>NOTE: All the accesses to the buffer are unchecked for performance reason. Behavior of accessing buffer with
* invalid index is undefined.
* <p>Backward-compatible:
* <ul>
* <li>Index file (forward index, inverted index, dictionary) is always big-endian</li>
* <li>Star-tree file is always little-endian</li>
* <li>Temporary buffer should be allocated using native-order for performance</li>
* </ul>
*/
@ThreadSafe
public abstract class PinotDataBuffer implements Closeable {
  public static final ByteOrder NATIVE_ORDER = ByteOrder.nativeOrder();
  // The opposite of the platform's native order, for explicitly
  // non-native-ordered buffers.
  public static final ByteOrder NON_NATIVE_ORDER =
      NATIVE_ORDER == ByteOrder.BIG_ENDIAN ? ByteOrder.LITTLE_ENDIAN : ByteOrder.BIG_ENDIAN;

  private static final Logger LOGGER = LoggerFactory.getLogger(PinotDataBuffer.class);

  // We use this threshold to decide whether we use bulk bytes processing or not
  // With number of bytes less than this threshold, we get/put bytes one by one
  // With number of bytes more than this threshold, we create a ByteBuffer from the buffer and use bulk get/put method
  protected static int BULK_BYTES_PROCESSING_THRESHOLD = 10;

  /**
   * Bookkeeping metadata for a tracked buffer: allocation type (DIRECT vs
   * MMAP), size, optional backing file path and optional free-form
   * description. Used only for diagnostics (see {@link #getBufferInfo()}).
   */
  private static class BufferContext {
    enum Type {
      DIRECT, MMAP
    }

    final Type _type;
    final long _size;
    final String _filePath;
    final String _description;

    BufferContext(Type type, long size, @Nullable String filePath, @Nullable String description) {
      _type = type;
      _size = size;
      _filePath = filePath;
      _description = description;
    }

    @Override
    public String toString() {
      String context = "Type: " + _type + ", Size: " + _size;
      if (_filePath != null) {
        context += ", File Path: " + _filePath;
      }
      if (_description != null) {
        context += ", Description: " + _description;
      }
      return context;
    }
  }

  // Global counters for buffer tracking, exposed through the static getters
  // below.
  private static final AtomicLong DIRECT_BUFFER_COUNT = new AtomicLong();
  private static final AtomicLong DIRECT_BUFFER_USAGE = new AtomicLong();
  private static final AtomicLong MMAP_BUFFER_COUNT = new AtomicLong();
  private static final AtomicLong MMAP_BUFFER_USAGE = new AtomicLong();
  private static final AtomicLong ALLOCATION_FAILURE_COUNT = new AtomicLong();
  // Weak keys: entries for buffers that become unreachable without being
  // closed are dropped automatically. All access is synchronized on the map
  // itself.
  private static final Map<PinotDataBuffer, BufferContext> BUFFER_CONTEXT_MAP = new WeakHashMap<>();

  /**
   * Allocates a buffer using direct memory.
   * <p>NOTE: The contents of the allocated buffer are not defined.
   *
   * @param size The size of the buffer
   * @param byteOrder The byte order of the buffer (big-endian or little-endian)
   * @param description The description of the buffer
   * @return The buffer allocated
   */
  public static PinotDataBuffer allocateDirect(long size, ByteOrder byteOrder, @Nullable String description) {
    PinotDataBuffer buffer;
    try {
      // A single ByteBuffer can only address up to 2GB; larger buffers use
      // the long-offset implementations.
      if (size <= Integer.MAX_VALUE) {
        buffer = PinotByteBuffer.allocateDirect((int) size, byteOrder);
      } else {
        if (byteOrder == NATIVE_ORDER) {
          buffer = PinotNativeOrderLBuffer.allocateDirect(size);
        } else {
          buffer = PinotNonNativeOrderLBuffer.allocateDirect(size);
        }
      }
    } catch (Exception e) {
      LOGGER.error("Caught exception while allocating direct buffer of size: {} with description: {}", size,
          description, e);
      LOGGER.error("Buffer stats: {}", getBufferStats());
      ALLOCATION_FAILURE_COUNT.getAndIncrement();
      throw e;
    }
    // Track the new buffer for stats and diagnostics.
    DIRECT_BUFFER_COUNT.getAndIncrement();
    DIRECT_BUFFER_USAGE.getAndAdd(size);
    synchronized (BUFFER_CONTEXT_MAP) {
      BUFFER_CONTEXT_MAP.put(buffer, new BufferContext(BufferContext.Type.DIRECT, size, null, description));
    }
    return buffer;
  }

  /**
   * Allocates a buffer using direct memory and loads a file into the buffer.
   * Counted as a DIRECT buffer (not MMAP) because the file contents are copied
   * into direct memory.
   */
  public static PinotDataBuffer loadFile(File file, long offset, long size, ByteOrder byteOrder,
      @Nullable String description) throws IOException {
    PinotDataBuffer buffer;
    try {
      // Same 2GB split as allocateDirect.
      if (size <= Integer.MAX_VALUE) {
        buffer = PinotByteBuffer.loadFile(file, offset, (int) size, byteOrder);
      } else {
        if (byteOrder == NATIVE_ORDER) {
          buffer = PinotNativeOrderLBuffer.loadFile(file, offset, size);
        } else {
          buffer = PinotNonNativeOrderLBuffer.loadFile(file, offset, size);
        }
      }
    } catch (Exception e) {
      LOGGER.error("Caught exception while loading file: {} from offset: {} of size: {} with description: {}",
          file.getAbsolutePath(), offset, size, description, e);
      LOGGER.error("Buffer stats: {}", getBufferStats());
      ALLOCATION_FAILURE_COUNT.getAndIncrement();
      throw e;
    }
    DIRECT_BUFFER_COUNT.getAndIncrement();
    DIRECT_BUFFER_USAGE.getAndAdd(size);
    synchronized (BUFFER_CONTEXT_MAP) {
      BUFFER_CONTEXT_MAP.put(buffer,
          new BufferContext(BufferContext.Type.DIRECT, size, file.getAbsolutePath().intern(), description));
    }
    return buffer;
  }

  /**
   * Allocates a buffer using direct memory and loads a big-endian file into the buffer.
   */
  @VisibleForTesting
  public static PinotDataBuffer loadBigEndianFile(File file) throws IOException {
    return loadFile(file, 0, file.length(), ByteOrder.BIG_ENDIAN, null);
  }

  /**
   * Memory maps a file into a buffer.
   * <p>NOTE: If the file gets extended, the contents of the extended portion of the file are not defined.
   */
  public static PinotDataBuffer mapFile(File file, boolean readOnly, long offset, long size, ByteOrder byteOrder,
      @Nullable String description) throws IOException {
    PinotDataBuffer buffer;
    try {
      // Same 2GB split as allocateDirect.
      if (size <= Integer.MAX_VALUE) {
        buffer = PinotByteBuffer.mapFile(file, readOnly, offset, (int) size, byteOrder);
      } else {
        if (byteOrder == NATIVE_ORDER) {
          buffer = PinotNativeOrderLBuffer.mapFile(file, readOnly, offset, size);
        } else {
          buffer = PinotNonNativeOrderLBuffer.mapFile(file, readOnly, offset, size);
        }
      }
    } catch (Exception e) {
      LOGGER.error("Caught exception while mapping file: {} from offset: {} of size: {} with description: {}",
          file.getAbsolutePath(), offset, size, description, e);
      LOGGER.error("Buffer stats: {}", getBufferStats());
      ALLOCATION_FAILURE_COUNT.getAndIncrement();
      throw e;
    }
    MMAP_BUFFER_COUNT.getAndIncrement();
    MMAP_BUFFER_USAGE.getAndAdd(size);
    synchronized (BUFFER_CONTEXT_MAP) {
      BUFFER_CONTEXT_MAP.put(buffer,
          new BufferContext(BufferContext.Type.MMAP, size, file.getAbsolutePath().intern(), description));
    }
    return buffer;
  }

  /**
   * Memory maps a read-only big-endian file into a buffer.
   */
  @VisibleForTesting
  public static PinotDataBuffer mapReadOnlyBigEndianFile(File file) throws IOException {
    return mapFile(file, true, 0, file.length(), ByteOrder.BIG_ENDIAN, null);
  }

  /** @return the number of live direct-allocated buffers */
  public static long getDirectBufferCount() {
    return DIRECT_BUFFER_COUNT.get();
  }

  /** @return the total bytes held by live direct-allocated buffers */
  public static long getDirectBufferUsage() {
    return DIRECT_BUFFER_USAGE.get();
  }

  /** @return the number of live memory-mapped buffers */
  public static long getMmapBufferCount() {
    return MMAP_BUFFER_COUNT.get();
  }

  /** @return the total bytes held by live memory-mapped buffers */
  public static long getMmapBufferUsage() {
    return MMAP_BUFFER_USAGE.get();
  }

  /** @return the number of failed allocations/mappings since startup */
  public static long getAllocationFailureCount() {
    return ALLOCATION_FAILURE_COUNT.get();
  }

  /** @return a diagnostic description for every tracked buffer */
  public static List<String> getBufferInfo() {
    synchronized (BUFFER_CONTEXT_MAP) {
      List<String> bufferInfo = new ArrayList<>(BUFFER_CONTEXT_MAP.size());
      for (BufferContext bufferContext : BUFFER_CONTEXT_MAP.values()) {
        bufferInfo.add(bufferContext.toString());
      }
      return bufferInfo;
    }
  }

  // One-line summary of the global counters, used in allocation error logs.
  private static String getBufferStats() {
    return String.format("Direct buffer count: %s, size: %s; Mmap buffer count: %s, size: %s",
        DIRECT_BUFFER_COUNT.get(), DIRECT_BUFFER_USAGE.get(), MMAP_BUFFER_COUNT.get(), MMAP_BUFFER_USAGE.get());
  }

  // Whether this instance should flush/release its memory on close(); cleared
  // after the first close() so repeated calls are no-ops.
  private boolean _closeable;

  /** @param closeable whether {@link #close()} should flush and release this buffer */
  protected PinotDataBuffer(boolean closeable) {
    _closeable = closeable;
  }

  /**
   * Flushes and releases the buffer, then updates the global tracking
   * counters. Idempotent: only the first call has an effect.
   */
  @Override
  public synchronized void close() throws IOException {
    if (_closeable) {
      flush();
      release();
      BufferContext bufferContext;
      synchronized (BUFFER_CONTEXT_MAP) {
        bufferContext = BUFFER_CONTEXT_MAP.remove(this);
      }
      // Context may already be gone if the entry was reclaimed from the weak
      // map; only decrement the stats when we actually removed it.
      if (bufferContext != null) {
        if (bufferContext._type == BufferContext.Type.DIRECT) {
          DIRECT_BUFFER_COUNT.getAndDecrement();
          DIRECT_BUFFER_USAGE.getAndAdd(-bufferContext._size);
        } else {
          MMAP_BUFFER_COUNT.getAndDecrement();
          MMAP_BUFFER_USAGE.getAndAdd(-bufferContext._size);
        }
      }
      _closeable = false;
    }
  }

  // Typed single-value accessors. Each has an int-offset and a long-offset
  // overload; offsets are absolute byte positions and are NOT bounds-checked
  // (see class javadoc).
  public abstract byte getByte(int offset);

  public abstract byte getByte(long offset);

  public abstract void putByte(int offset, byte value);

  public abstract void putByte(long offset, byte value);

  public abstract char getChar(int offset);

  public abstract char getChar(long offset);

  public abstract void putChar(int offset, char value);

  public abstract void putChar(long offset, char value);

  public abstract short getShort(int offset);

  public abstract short getShort(long offset);

  public abstract void putShort(int offset, short value);

  public abstract void putShort(long offset, short value);

  public abstract int getInt(int offset);

  public abstract int getInt(long offset);

  public abstract void putInt(int offset, int value);

  public abstract void putInt(long offset, int value);

  public abstract long getLong(int offset);

  public abstract long getLong(long offset);

  public abstract void putLong(int offset, long value);

  public abstract void putLong(long offset, long value);

  public abstract float getFloat(int offset);

  public abstract float getFloat(long offset);

  public abstract void putFloat(int offset, float value);

  public abstract void putFloat(long offset, float value);

  public abstract double getDouble(int offset);

  public abstract double getDouble(long offset);

  public abstract void putDouble(int offset, double value);

  public abstract void putDouble(long offset, double value);

  // Bulk copy from this buffer into a byte array.
  public abstract void copyTo(long offset, byte[] buffer, int destOffset, int size);

  /** Copies {@code buffer.length} bytes starting at {@code offset} into {@code buffer}. */
  public void copyTo(long offset, byte[] buffer) {
    copyTo(offset, buffer, 0, buffer.length);
  }

  // Bulk copy from this buffer into another PinotDataBuffer.
  public abstract void copyTo(long offset, PinotDataBuffer buffer, long destOffset, long size);

  // Bulk copy from a byte array into this buffer, starting at offset.
  public abstract void readFrom(long offset, byte[] buffer, int srcOffset, int size);

  /** Copies all of {@code buffer} into this buffer starting at {@code offset}. */
  public void readFrom(long offset, byte[] buffer) {
    readFrom(offset, buffer, 0, buffer.length);
  }

  // Bulk copy from a ByteBuffer / file region into this buffer.
  public abstract void readFrom(long offset, ByteBuffer buffer);

  public abstract void readFrom(long offset, File file, long srcOffset, long size) throws IOException;

  /** @return the size of this buffer in bytes */
  public abstract long size();

  /** @return the byte order used to interpret multi-byte values */
  public abstract ByteOrder order();

  /**
   * Creates a view of the range [start, end) of this buffer with the given byte order. Calling {@link #flush()} or
   * {@link #close()} has no effect on view.
   */
  public abstract PinotDataBuffer view(long start, long end, ByteOrder byteOrder);

  /**
   * Creates a view of the range [start, end) of this buffer with the current byte order. Calling {@link #flush()} or
   * {@link #close()} has no effect on view.
   */
  public PinotDataBuffer view(long start, long end) {
    return view(start, end, order());
  }

  // Exposes a region of this buffer as a direct ByteBuffer (int-sized, so at
  // most 2GB per call).
  public abstract ByteBuffer toDirectByteBuffer(long offset, int size, ByteOrder byteOrder);

  /** Same as {@link #toDirectByteBuffer(long, int, ByteOrder)} using this buffer's current byte order. */
  public ByteBuffer toDirectByteBuffer(long offset, int size) {
    return toDirectByteBuffer(offset, size, order());
  }

  /** Flushes changes in this buffer; invoked before {@link #release()} in {@link #close()}. */
  public abstract void flush();

  /** Releases the underlying memory; invoked once from {@link #close()}. */
  protected abstract void release() throws IOException;
}
| |
package com.blogspot.techzealous.usbplug;
import java.io.File;
import java.util.ArrayList;
import android.app.Activity;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.os.Bundle;
import android.os.Handler;
import android.preference.PreferenceManager;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.Toast;
import com.blogspot.techzealous.usbplug.utils.ConnectionInfoUP;
import com.blogspot.techzealous.usbplug.utils.ListAdapterHistory;
import com.blogspot.techzealous.usbplug.utils.UpConstants;
/**
 * Displays the history of USB connect/disconnect events stored in the app's
 * SQLite database. On first start it migrates a database created by an older
 * app version (missing the "type" column) to the current schema.
 */
public class HistoryActivity extends Activity {

    // Shown while the history database is being read on a background thread.
    private LinearLayout linearLayoutLoading;
    private ListView listViewHistory;
    private ListAdapterHistory adapterHistory;
    // private Button buttonClear;
    private SharedPreferences prefs;
    private SQLiteDatabase myDB;
    private Thread threadLoadDB;
    // Posted to the main thread once the background load completes.
    private Runnable runnableWhenLoaded;
    private Handler myHandler;
    // Column name reported by "PRAGMA table_info(...)" for a column's name.
    private final String mNAME = "name";

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.history);

        linearLayoutLoading = (LinearLayout) findViewById(R.id.linearLayoutHistoryLoading);
        listViewHistory = (ListView) findViewById(R.id.listViewHistory);
        // buttonClear = (Button) findViewById(R.id.buttonHistoryClear);
        prefs = PreferenceManager.getDefaultSharedPreferences(this);
        myHandler = new Handler();

        runnableWhenLoaded = new Runnable() {
            public void run() {
                adapterHistory = new ListAdapterHistory(HistoryActivity.this, UpConstants.history);
                listViewHistory.setAdapter(adapterHistory);
                linearLayoutLoading.setVisibility(View.GONE);
            }
        };

        // On the very first start, migrate a pre-existing database to the
        // current schema before loading it.
        if(prefs.getBoolean(UpConstants.PREF_FIRST_START, true)) {
            File dbFile = this.getDatabasePath(UpConstants.UP_DB);
            if(dbFile.exists()) {
                Toast.makeText(HistoryActivity.this, R.string.toast_migratedb, Toast.LENGTH_LONG).show();
                migrateDb();
            }
            prefs.edit().putBoolean(UpConstants.PREF_FIRST_START, false).commit();
        }

        loadDB();

        // buttonClear.setOnClickListener(new OnClickListener() {
        // @Override
        // public void onClick(View v) {
        // clearHistory();
        // }
        // });
    }

    /**
     * On return to the foreground, appends the newest database row to the
     * in-memory history if it is not already the last list entry.
     */
    @Override
    public void onResume() {
        super.onResume();
        // Only refresh after the initial load has finished; otherwise the
        // background loader will pick up the latest rows anyway.
        if(linearLayoutLoading.getVisibility() == View.GONE) {
            SQLiteDatabase DBadd = HistoryActivity.this.openOrCreateDatabase(UpConstants.UP_DB, MODE_PRIVATE, null);
            Cursor curAdd = DBadd.rawQuery("SELECT * FROM " + UpConstants.TIME_TABLE + " WHERE " + UpConstants.TIME_TABLE_COLUM_TIME + "=(SELECT max(" + UpConstants.TIME_TABLE_COLUM_TIME + ") FROM " + UpConstants.TIME_TABLE + ")", null);
            if(curAdd != null) {
                if(curAdd.moveToFirst()) {
                    ConnectionInfoUP conInfo = readConnectionInfo(curAdd);
                    // If the last entry in the db is not also the last entry in
                    // the in-memory history (there was a new addition to the
                    // db), add it to the list too.
                    if(UpConstants.history.size() > 0) {
                        ConnectionInfoUP conLast = UpConstants.history.get(UpConstants.history.size() - 1);
                        if(conLast.getTime() != conInfo.getTime()) {
                            UpConstants.history.add(conInfo);
                            adapterHistory.notifyDataSetChanged();
                        }
                    }
                }
                // Close inside the null check: the original closed the cursor
                // unconditionally and would NPE if the query returned null.
                curAdd.close();
            }
            DBadd.close();
        }
    }

    /** Maps the current row of {@code cursor} (current schema) to a ConnectionInfoUP. */
    private ConnectionInfoUP readConnectionInfo(Cursor cursor) {
        ConnectionInfoUP conInfo = new ConnectionInfoUP();
        long ms = cursor.getLong(cursor.getColumnIndexOrThrow(UpConstants.TIME_TABLE_COLUM_TIME));
        int status = cursor.getInt(cursor.getColumnIndexOrThrow(UpConstants.TIME_TABLE_COLUM_STATUS));
        String type = cursor.getString(cursor.getColumnIndexOrThrow(UpConstants.TIME_TABLE_COLUM_TYPE));
        int charge = cursor.getInt(cursor.getColumnIndexOrThrow(UpConstants.TIME_TABLE_COLUM_CHARGE));
        conInfo.setTime(ms);
        conInfo.setType(type);
        conInfo.setChargePercents(charge);
        conInfo.setStatus(status == 1);
        return conInfo;
    }

    /**
     * Reloads the in-memory history from the database on a background thread,
     * then publishes it to the list view via {@code runnableWhenLoaded}.
     */
    private void loadDB() {
        linearLayoutLoading.setVisibility(View.VISIBLE);

        threadLoadDB = new Thread(new Runnable() {
            public void run() {
                UpConstants.history.clear();

                myDB = HistoryActivity.this.openOrCreateDatabase(UpConstants.UP_DB, MODE_PRIVATE, null);
                myDB.execSQL(UpConstants.UP_CREATE_TABLE);
                Cursor c = myDB.query(UpConstants.TIME_TABLE, null, null, null, null, null, null);
                if(c != null) {
                    if(c.moveToFirst()) {
                        do {
                            UpConstants.history.add(readConnectionInfo(c));
                        } while(c.moveToNext());
                    }
                    // Close inside the null check (the original closed
                    // unconditionally and could NPE).
                    c.close();
                }
                myDB.close();
                myHandler.post(runnableWhenLoaded);
            }
        });
        threadLoadDB.start();
    }

    /**
     * Upgrades a database created by an older app version: if the time table
     * lacks the "type" column, copies all rows out, recreates the table with
     * the current schema and re-inserts the data.
     */
    private void migrateDb() {
        myDB = HistoryActivity.this.openOrCreateDatabase(UpConstants.UP_DB, MODE_PRIVATE, null);

        // The table needs migrating only when the "type" column is missing.
        boolean isShouldMigrate = true;
        Cursor curMigrate = myDB.rawQuery("PRAGMA table_info(" + UpConstants.TIME_TABLE + ")", null);
        if(curMigrate != null) {
            if(curMigrate.moveToFirst()) {
                do {
                    String colName = curMigrate.getString(curMigrate.getColumnIndexOrThrow(mNAME));
                    if(colName.equalsIgnoreCase(UpConstants.TIME_TABLE_COLUM_TYPE)) {
                        isShouldMigrate = false;
                        break;
                    }
                } while(curMigrate.moveToNext());
            }
            curMigrate.close();
        }

        if(isShouldMigrate) {
            linearLayoutLoading.setVisibility(View.VISIBLE);

            ArrayList<ConnectionInfoUP> arrayTemp = new ArrayList<ConnectionInfoUP>();
            Cursor c = myDB.query(UpConstants.TIME_TABLE, null, null, null, null, null, null);
            if(c != null) {
                if(c.moveToFirst()) {
                    do {
                        ConnectionInfoUP conInfo = new ConnectionInfoUP();
                        long ms = c.getLong(c.getColumnIndexOrThrow(UpConstants.TIME_TABLE_COLUM_TIME));
                        int status = c.getInt(c.getColumnIndexOrThrow(UpConstants.TIME_TABLE_COLUM_STATUS));
                        // Old-schema rows have no type or charge; use defaults.
                        conInfo.setTime(ms);
                        conInfo.setType("");
                        conInfo.setChargePercents(0);
                        conInfo.setStatus(status == 1);
                        arrayTemp.add(conInfo);
                    } while(c.moveToNext());
                }

                myDB.execSQL("DROP TABLE IF EXISTS " + UpConstants.TIME_TABLE);
                myDB.execSQL(UpConstants.UP_CREATE_TABLE);

                for(ConnectionInfoUP con : arrayTemp) {
                    int statusLast = con.getStatus() ? 1 : 0;
                    // Bind the values instead of concatenating them into the
                    // SQL string; the original would break on a value
                    // containing a quote. Values are bound as strings to keep
                    // the stored representation identical to the old code.
                    myDB.execSQL("INSERT INTO " + UpConstants.TIME_TABLE
                            + " (" + UpConstants.TIME_TABLE_COLUM_TIME + ", " + UpConstants.TIME_TABLE_COLUM_STATUS + ", " + UpConstants.TIME_TABLE_COLUM_TYPE
                            + ", " + UpConstants.TIME_TABLE_COLUM_CHARGE + ") VALUES(?, ?, ?, ?);",
                            new String[] {String.valueOf(con.getTime()), String.valueOf(statusLast), con.getType(), String.valueOf(con.getChargePercents())});
                }
                // Close inside the null check to avoid an NPE.
                c.close();
            }
            myDB.close();

            linearLayoutLoading.setVisibility(View.GONE);
        } else {
            // The original leaked the database handle on this path.
            myDB.close();
        }
    }

    // private void clearHistory() {
    // UpConstants.history.clear();
    //
    // myDB = HistoryActivity.this.openOrCreateDatabase(UpConstants.UP_DB, MODE_PRIVATE, null);
    // myDB.execSQL("DROP TABLE IF EXISTS " + UpConstants.TIME_TABLE);
    // myDB.close();
    //
    // adapterHistory.notifyDataSetChanged();
    // }
}
| |
/*
* RDV
* Real-time Data Viewer
* http://rdv.googlecode.com/
*
* Copyright (c) 2005-2007 University at Buffalo
* Copyright (c) 2005-2007 NEES Cyberinfrastructure Center
* Copyright (c) 2008 Palta Software
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
* $URL$
* $Revision$
* $Date$
* $Author$
*/
package org.rdv.rbnb;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.rdv.DataViewer;
import org.rdv.data.Channel;
import org.rdv.data.LocalChannel;
import org.rdv.data.LocalChannelManager;
import com.rbnb.sapi.ChannelMap;
import com.rbnb.sapi.ChannelTree;
import com.rbnb.sapi.LocalChannelMap;
import com.rbnb.sapi.SAPIException;
import com.rbnb.sapi.Sink;
/**
* A class to manage a connection to an RBNB server and to post channel data to
* interested listeners.
*
* @author Jason P. Hanley
*/
public class RBNBController implements Player {
  static Log log = LogFactory.getLog(RBNBController.class.getName());

  /** the single instance of this class */
  protected static RBNBController instance;

  // Sink identification; the local host name is appended in the constructor.
  private String rbnbSinkName = "RDV";

  // The dedicated RBNB data thread (runs runRBNB()).
  private Thread rbnbThread;

  // Current player state (one of the Player STATE_* constants).
  private int state;

  private Sink sink;

  // Server connection settings.
  private String rbnbHostName;
  private int rbnbPortNumber;
  private static final String DEFAULT_RBNB_HOST_NAME = "localhost";
  private static final int DEFAULT_RBNB_PORT_NUMBER = 3333;

  private boolean requestIsMonitor;

  // Channels currently subscribed to.
  private ChannelMap requestedChannels;

  // Collaborating managers created in the constructor.
  private ChannelManager channelManager;
  private MetadataManager metadataManager;
  private MarkerManager markerManager;

  // Listener lists, one per event type.
  private List<TimeListener> timeListeners;
  private List<StateListener> stateListeners;
  private List<SubscriptionListener> subscriptionListeners;
  private List<PlaybackRateListener> playbackRateListeners;
  private List<TimeScaleListener> timeScaleChangeListeners;
  private List<MessageListener> messageListeners;
  private List<ConnectionListener> connectionListeners;

  // Pre-fetch state, presumably guarded by preFetchLock (the consuming code
  // is outside this region -- confirm).
  private LocalChannelMap preFetchChannelMap;
  private Object preFetchLock = new Object();
  private boolean preFetchDone;

  // Current playback position (seconds since the epoch), rate and time scale.
  private double location;
  private double playbackRate;
  private double timeScale;

  // Pending updates posted from other threads; -1 means "no update pending".
  // Each value is paired with a lock object guarding its writes.
  private double updateLocation = -1;
  private Object updateLocationLock = new Object();

  private double updateTimeScale = -1;
  private Object updateTimeScaleLock = new Object();

  private double updatePlaybackRate = -1;
  private Object updatePlaybackRateLock = new Object();

  // Queues of pending state changes and (un)subscribe requests, drained by
  // the data thread; access is synchronized on the lists themselves.
  private List<Integer> stateChangeRequests = new ArrayList<Integer>();
  private List<SubscriptionRequest> updateSubscriptionRequests = new ArrayList<SubscriptionRequest>();

  private boolean dropData;

  // Refresh interval while playing (0.05, presumably seconds) and loading
  // timeout (30000, presumably milliseconds) -- TODO confirm units against
  // the playback/loading code.
  private final double PLAYBACK_REFRESH_RATE = 0.05;

  private final long LOADING_TIMEOUT = 30000;
  /**
   * Creates the controller with default connection settings
   * (localhost:3333), initializes all listener lists and collaborating
   * managers, and starts the RBNB data thread.
   *
   * <p>NOTE(review): run() starts a thread from within the constructor, so
   * that thread can observe this object before construction completes --
   * confirm every field runRBNB() reads is assigned before run() is called.
   */
  protected RBNBController() {
    // get the system host name and append it to the sink name
    try {
      InetAddress addr = InetAddress.getLocalHost();
      String hostname = addr.getHostName();
      rbnbSinkName += "@" + hostname;
    } catch (UnknownHostException e) {}

    state = STATE_DISCONNECTED;

    rbnbHostName = DEFAULT_RBNB_HOST_NAME;
    rbnbPortNumber = DEFAULT_RBNB_PORT_NUMBER;

    requestIsMonitor = false;
    dropData = false;

    // Initial position is the current wall-clock time in seconds; rate and
    // time scale default to 1.
    location = System.currentTimeMillis()/1000d;
    playbackRate = 1;
    timeScale = 1;

    requestedChannels = new ChannelMap();
    channelManager = new ChannelManager();
    metadataManager = new MetadataManager(this);
    markerManager = new MarkerManager(this);

    timeListeners = new ArrayList<TimeListener>();
    stateListeners = new ArrayList<StateListener>();
    subscriptionListeners = new ArrayList<SubscriptionListener>();
    playbackRateListeners = new ArrayList<PlaybackRateListener>();
    timeScaleChangeListeners = new ArrayList<TimeScaleListener>();
    messageListeners = new ArrayList<MessageListener>();
    connectionListeners = new ArrayList<ConnectionListener>();

    run();
  }
/**
* Get the single instance of this class.
*
* @return the instance of this class
*/
public static RBNBController getInstance() {
if (instance == null) {
instance = new RBNBController();
}
return instance;
}
private void run() {
rbnbThread = new Thread(new Runnable() {
public void run() {
runRBNB();
}
}, "RBNB");
rbnbThread.start();
}
  /**
   * Main loop of the RBNB data thread: repeatedly drains pending
   * subscription, location, time-scale, playback-rate and state-change
   * requests, then performs one step of the current state, until the state
   * becomes STATE_EXITING. Cleans up the connection and metadata updates on
   * exit.
   */
  private void runRBNB() {
    log.info("RBNB data thread has started.");

    while (state != STATE_EXITING) {
      // Apply everything posted from other threads since the last pass.
      processSubscriptionRequests();
      processLocationUpdate();
      processTimeScaleUpdate();
      processPlaybackRateUpdate();
      processStateChangeRequests();

      switch (state) {
        case STATE_LOADING:
          // Loading is a transient state that must be left explicitly by the
          // code that entered it; reaching it here means a transition was
          // missed.
          log.warn("You must always manually transition from the loading state.");
          changeStateSafe(STATE_STOPPED);
          break;
        case STATE_PLAYING:
          updateDataPlaying();
          break;
        case STATE_MONITORING:
          updateDataMonitoring();
          break;
        case STATE_STOPPED:
        case STATE_DISCONNECTED:
          // Nothing to do in these states; sleep briefly to avoid busy-waiting.
          try { Thread.sleep(50); } catch (Exception e) {}
          break;
      }
    }

    closeRBNB();
    metadataManager.stopUpdating();

    log.info("RBNB data thread is exiting.");
  }
// State Processing Methods
private void processSubscriptionRequests() {
while (!updateSubscriptionRequests.isEmpty()) {
SubscriptionRequest subscriptionRequest;
synchronized (updateSubscriptionRequests) {
subscriptionRequest = (SubscriptionRequest)updateSubscriptionRequests.remove(0);
}
List<String> channelNames = subscriptionRequest.getChannelNames();
DataListener listener = subscriptionRequest.getListener();
if (subscriptionRequest.isSubscribe()) {
subscribeSafe(channelNames, listener);
} else {
unsubscribeSafe(channelNames, listener);
}
}
}
// Applies a pending seek request. updateLocation == -1 means "no request";
// the setter writes it under updateLocationLock and this method consumes it.
private void processLocationUpdate() {
if (updateLocation != -1) {
double oldLocation = location;
synchronized (updateLocationLock) {
location = updateLocation;
updateLocation = -1;
}
// seek to the position we are already at: nothing to do
if (oldLocation == location) {
return;
}
log.info("Setting location to " + DataViewer.formatDate(location) + ".");
if (requestedChannels.NumberOfChannels() > 0) {
changeStateSafe(STATE_LOADING);
double duration;
// if we moved forward by less than one time scale, only the gap needs
// to be loaded; otherwise reload a full time-scale window
if (oldLocation < location && oldLocation > location-timeScale) {
duration = location - oldLocation;
} else {
duration = timeScale;
}
loadData(location, duration);
changeStateSafe(STATE_STOPPED);
} else {
// no channels to load; just move the displayed time
updateTimeListeners(location);
}
}
}
// Applies a pending time-scale change. updateTimeScale == -1 means "no
// request"; the setter writes it under updateTimeScaleLock.
private void processTimeScaleUpdate() {
if (updateTimeScale != -1) {
double oldTimeScale = timeScale;
synchronized (updateTimeScaleLock) {
timeScale = updateTimeScale;
updateTimeScale = -1;
}
if (timeScale == oldTimeScale) {
return;
}
log.info("Setting time scale to " + timeScale + ".");
fireTimeScaleChanged(timeScale);
// a larger window means previously unloaded data is now visible, so
// reload; shrinking the window requires no new data
if (timeScale > oldTimeScale && requestedChannels.NumberOfChannels() > 0) {
//TODO make this loading smarter
int originalState = state;
changeStateSafe(STATE_LOADING);
loadData();
// restore whatever mode we were in before the reload
if (originalState == STATE_PLAYING) {
changeStateSafe(STATE_PLAYING);
} else if (originalState == STATE_MONITORING) {
changeStateSafe(STATE_MONITORING);
} else {
changeStateSafe(STATE_STOPPED);
}
}
}
}
// Applies a pending playback-rate change. updatePlaybackRate == -1 means
// "no request"; the setter writes it under updatePlaybackRateLock.
private void processPlaybackRateUpdate() {
if (updatePlaybackRate != -1) {
double oldPlaybackRate = playbackRate;
synchronized (updatePlaybackRateLock) {
playbackRate = updatePlaybackRate;
updatePlaybackRate = -1;
}
if (playbackRate == oldPlaybackRate) {
return;
}
log.info("Setting playback rate to " + playbackRate + " seconds.");
if (state == STATE_PLAYING) {
// discard the pre-fetch issued at the old rate and start one at the
// new rate so the playback loop picks up correctly sized chunks
getPreFetchChannelMap();
preFetchData(location, playbackRate);
}
firePlaybackRateChanged(playbackRate);
}
}
// Drains the queue of state-change requests posted by other threads via
// setState and applies each transition on this (the RBNB) thread.
private void processStateChangeRequests() {
while (!stateChangeRequests.isEmpty()) {
int updateState;
// only removal is synchronized; this thread is the sole consumer
synchronized (stateChangeRequests) {
updateState = ((Integer)stateChangeRequests.remove(0)).intValue();
}
changeStateSafe(updateState);
}
}
/**
 * Transitions the controller to the given state, performing the connection
 * and data-request work the transition requires. Must only be called from
 * the RBNB data thread.
 *
 * @param newState the state to transition to (one of the STATE_* constants)
 * @return true if the transition succeeded, false otherwise
 */
private boolean changeStateSafe(int newState) {
  if (state == newState) {
    log.info("Already in state " + getStateName(state) + ".");
    return true;
  } else if (state == STATE_PLAYING) {
    // leaving playback: consume any outstanding pre-fetch so it is not
    // left dangling for the next playback session
    getPreFetchChannelMap();
  } else if (state == STATE_EXITING) {
    // fixed: previously logged getStateName(state), which in this branch
    // always printed "exiting" instead of the requested target state
    log.error("Can not transition out of exiting state to " + getStateName(newState) + " state.");
    return false;
  } else if (state == STATE_DISCONNECTED && newState != STATE_EXITING) {
    // any transition out of disconnected (except exit) requires a server
    // connection first
    fireConnecting();
    try {
      initRBNB();
    } catch (SAPIException e) {
      closeRBNB();
      String message = e.getMessage();
      // detect nested exceptions
      if (message.contains("java.io.InterruptedIOException")) {
        log.info("RBNB server connection canceled by user.");
      } else {
        log.error("Failed to connect to the RBNB server.");
        fireErrorMessage("Failed to connect to the RBNB server.");
      }
      fireConnectionFailed();
      return false;
    }
    metadataManager.startUpdating();
    fireConnected();
  }

  switch (newState) {
    case STATE_MONITORING:
      if (!monitorData()) {
        fireErrorMessage("Stopping real time. Failed to load data from the server. Please try again later.");
        return false;
      }
      break;
    case STATE_PLAYING:
      // prime the pre-fetch pipeline for the playback loop
      preFetchData(location, playbackRate);
      break;
    case STATE_LOADING:
    case STATE_STOPPED:
    case STATE_EXITING:
      break;
    case STATE_DISCONNECTED:
      closeRBNB();
      metadataManager.stopUpdating();
      LocalChannelManager.getInstance().removeAllChannels();
      break;
    default:
      // fixed: previously logged the old state instead of the unknown
      // requested state
      log.error("Unknown state: " + newState + ".");
      return false;
  }

  int oldState = state;
  state = newState;
  notifyStateListeners(state, oldState);
  log.info("Transitioned from state " + getStateName(oldState) + " to " + getStateName(state) + ".");
  return true;
}
// RBNB Methods
/**
 * Opens a sink connection to the RBNB server. Does nothing if a connection
 * is already open.
 *
 * @throws SAPIException if the connection cannot be established
 */
private void initRBNB() throws SAPIException {
  if (sink != null) {
    return;  // already connected
  }
  sink = new Sink();
  sink.OpenRBNBConnection(rbnbHostName + ":" + rbnbPortNumber, rbnbSinkName);
  log.info("Connected to RBNB server.");
}
/**
 * Closes the sink connection to the RBNB server, if one is open, and clears
 * the sink reference so isConnected() reports false.
 */
private void closeRBNB() {
  if (sink != null) {
    sink.CloseRBNBConnection();
    sink = null;
    log.info("Connection to RBNB server closed.");
  }
}
// Tears down and re-opens the server connection. Used to clear a sink that
// is stuck in monitor mode before issuing a plain request.
private void reInitRBNB() throws SAPIException {
closeRBNB();
initRBNB();
}
// Subscription Methods
private void subscribeSafe(List<String> channelNames, DataListener panel) {
//skip subscription if we are not connected
if (state == STATE_DISCONNECTED) {
return;
}
// a list of channels to load data for
List<String> channelsToLoad = new ArrayList<String>();
//subscribe to channels
for (String channelName : channelNames) {
Channel channel = getChannel(channelName);
// see if this is a local channel and subscribe to its server channels,
// otherwise just subscribe to the channel
if (channel != null && channel instanceof LocalChannel) {
LocalChannel localChannel = (LocalChannel) channel;
List<String> serverChannels = localChannel.getServerChannels();
for (String serverChannel : serverChannels) {
try {
requestedChannels.Add(serverChannel);
} catch (SAPIException e) {
log.error("Failed to subscribe to channel " + serverChannel + ".");
e.printStackTrace();
continue;
}
}
log.info("Subscribed to channel " + channelName + " with server channels: " + serverChannels);
channelsToLoad.addAll(serverChannels);
} else {
try {
requestedChannels.Add(channelName);
} catch (SAPIException e) {
log.error("Failed to subscribe to channel " + channelName + ".");
e.printStackTrace();
continue;
}
log.info("Subscribed to channel " + channelName + ".");
channelsToLoad.add(channelName);
}
//notify channel manager
channelManager.subscribe(channelName, panel);
}
int originalState = state;
changeStateSafe(STATE_LOADING);
loadData(channelsToLoad);
if (originalState == STATE_MONITORING) {
changeStateSafe(STATE_MONITORING);
} else if (originalState == STATE_PLAYING) {
changeStateSafe(STATE_PLAYING);
} else {
changeStateSafe(STATE_STOPPED);
}
for (String channelName : channelNames) {
fireSubscriptionNotification(channelName);
}
}
// Unsubscribes the listener from the given channels and rebuilds the
// requested-channel map from the subscriptions that remain. Runs on the
// RBNB data thread only (called from processSubscriptionRequests).
private void unsubscribeSafe(List<String> channelNames, DataListener panel) {
//skip unsubscription if we are not connected
if (state == STATE_DISCONNECTED) {
return;
}
for (String channelName : channelNames) {
channelManager.unsubscribe(channelName, panel);
log.info("Unsubscribed from channel " + channelName + ".");
}
ChannelMap newRequestedChannels = new ChannelMap();
// recreate the channel map with the subscribed channels
for (String channelName : channelManager.getSubscribedChannels()) {
Channel channel = getChannel(channelName);
// see if this is a local channel and subscribe to its server channels,
// otherwise just subscribe to the channel
if (channel != null && channel instanceof LocalChannel) {
LocalChannel localChannel = (LocalChannel) channel;
List<String> serverChannels = localChannel.getServerChannels();
for (String serverChannel : serverChannels) {
try {
newRequestedChannels.Add(serverChannel);
} catch (SAPIException e) {
// best effort: keep rebuilding the map for the remaining channels
log.error("Failed to resubscribe to channel " + serverChannel + ".");
e.printStackTrace();
continue;
}
}
} else {
try {
newRequestedChannels.Add(channelName);
} catch (SAPIException e) {
log.error("Failed to resubscribe to channel " + channelName + ".");
e.printStackTrace();
continue;
}
}
}
requestedChannels = newRequestedChannels;
// restart the monitor so the server stops streaming the removed channels
if (state == STATE_MONITORING) {
monitorData();
}
for (String channelName : channelNames) {
fireUnsubscriptionNotification(channelName);
}
}
// Load Methods
/**
 * Load data for all channels, using the current location and time scale.
 */
private void loadData() {
loadData(location, timeScale);
}
/**
 * Load data for all currently requested channels.
 *
 * @param location the end time
 * @param duration the duration of data to load, in seconds
 */
private void loadData(double location, double duration) {
  List<String> subscribed = Arrays.asList(requestedChannels.GetChannelList());
  loadData(subscribed, location, duration);
}
/**
 * Load data for the specified channels, using the current location and
 * time scale.
 *
 * @param channelNames the names of the channels
 */
private void loadData(List<String> channelNames) {
loadData(channelNames, location, timeScale);
}
/**
 * Load data for the specified channels.
 * <p>
 * Channels are partitioned by display type and each group is fetched with a
 * different time window: video frames get a point request at the location,
 * tabular channels get the last second, and everything else gets the full
 * requested duration. The requestedChannels field is temporarily swapped to
 * each group's map while its request runs and always restored before return.
 *
 * @param channelNames a list of channel names
 * @param location the end time
 * @param duration the amount of data to load, in seconds
 */
private void loadData(List<String> channelNames, double location, double duration) {
  ChannelMap realRequestedChannels = requestedChannels;

  // partition the channels by how they are displayed
  ChannelMap imageChannels = new ChannelMap();
  ChannelMap tabularChannels = new ChannelMap();
  ChannelMap otherChannels = new ChannelMap();
  for (String channelName : channelNames) {
    try {
      if (isVideo(channelName)) {
        imageChannels.Add(channelName);
      } else if (channelManager.isChannelTabularOnly(channelName)) {
        tabularChannels.Add(channelName);
      } else {
        otherChannels.Add(channelName);
      }
    } catch (SAPIException e) {
      log.error("Failed to add channel " + channelName + ".");
      e.printStackTrace();
    }
  }

  // load each non-empty group; && short-circuits on the first failure
  boolean loaded = loadChannelGroup(imageChannels, location, 0)
      && loadChannelGroup(tabularChannels, location - 1, 1)
      && loadChannelGroup(otherChannels, location - duration, duration);

  if (!loaded) {
    requestedChannels = realRequestedChannels;
    changeStateSafe(STATE_STOPPED);
    return;
  }

  requestedChannels = realRequestedChannels;
  log.info("Loaded " + DataViewer.formatSeconds(timeScale) + " of data at " + DataViewer.formatDate(location) + ".");
}

/**
 * Requests and posts one group of channels for loadData.
 *
 * @param channels the channel group to load (may be empty)
 * @param start the request start time
 * @param duration the request duration, in seconds
 * @return true if the group was empty or loaded successfully
 */
private boolean loadChannelGroup(ChannelMap channels, double start, double duration) {
  if (channels.NumberOfChannels() == 0) {
    return true;  // nothing to load for this group
  }
  requestedChannels = channels;
  if (!requestData(start, duration)) {
    fireErrorMessage("Failed to load data from the server. Please try again later.");
    return false;
  }
  updateDataMonitoring();
  // start + duration equals the load end time for every group
  updateTimeListeners(start + duration);
  return true;
}
// Playback Methods
// Requests data for the requested channels, retrying once on failure.
private boolean requestData(double location, double duration) {
return requestData(location, duration, true);
}
// Issues an absolute-time request for the requested channels.
// If the sink was last used for monitoring, the connection is re-created
// first (a monitoring sink cannot service plain requests).
// On failure the connection is flagged for re-init and, if retry is set,
// the request is attempted once more.
private boolean requestData(double location, double duration, boolean retry) {
if (requestedChannels.NumberOfChannels() == 0) {
return false;
}
if (requestIsMonitor) {
try {
reInitRBNB();
} catch (SAPIException e) {
// keep the flag set so the next attempt re-creates the connection
requestIsMonitor = true;
return false;
}
requestIsMonitor = false;
}
try {
sink.Request(requestedChannels, location, duration, "absolute");
} catch (SAPIException e) {
log.error("Failed to request channels at " + DataViewer.formatDate(location) + " for " + DataViewer.formatSeconds(duration) + ".");
e.printStackTrace();
// force a reconnect before the retry (or the next caller's attempt)
requestIsMonitor = true;
if (retry) {
return requestData(location, duration, false);
} else {
return false;
}
}
return true;
}
// One playback iteration: consumes the previously pre-fetched chunk, kicks
// off the pre-fetch for the next chunk, posts the data, then paces the
// displayed time forward in PLAYBACK_REFRESH_RATE steps until the chunk is
// exhausted or a queued request (state/seek/scale/rate) interrupts it.
private synchronized void updateDataPlaying() {
if (requestedChannels.NumberOfChannels() == 0) {
fireStatusMessage("Stopping playback. No channels are selected.");
changeStateSafe(STATE_STOPPED);
return;
}
LocalChannelMap getmap = null;
// block until the pre-fetch started in the previous iteration completes
getmap = getPreFetchChannelMap();
if (getmap == null) {
fireErrorMessage("Stopping playback. Failed to load data from the server. Please try again later.");
changeStateSafe(STATE_STOPPED);
requestIsMonitor = true;
return;
} else if (getmap.GetIfFetchTimedOut()) {
fireErrorMessage("Stopping playback. Failed to load enough data from server. The playback rate may be too fast or the server is busy.");
changeStateSafe(STATE_STOPPED);
return;
}
//stop if no data in fetch and past end time, most likely end of data
if (getmap.NumberOfChannels() == 0 && !moreData(requestedChannels.GetChannelList(), location)) {
log.warn("Received no data. Assuming end of channel.");
changeStateSafe(STATE_STOPPED);
return;
}
// start fetching the next chunk while this one is being played out
preFetchData(location+playbackRate, playbackRate);
channelManager.postData(getmap);
double playbackDuration = playbackRate;
double playbackRefreshRate = PLAYBACK_REFRESH_RATE;
double playbackStepTime = playbackRate * playbackRefreshRate;
long playbackSteps = (long)(playbackDuration / playbackStepTime);
double locationStartTime = location;
long playbackStartTime = System.nanoTime();
long i = 0;
// advance time in steps; bail out early if any request is queued so the
// main loop can service it promptly
while (i<playbackSteps && stateChangeRequests.size() == 0 && updateLocation == -1 && updateTimeScale == -1 && updatePlaybackRate == -1) {
// wall-clock drift for this step: positive means we are ahead of
// schedule, negative means we are behind
double timeDifference = (playbackRefreshRate*(i+1)) - ((System.nanoTime() - playbackStartTime)/1000000000d);
if (dropData && timeDifference < -playbackRefreshRate) {
// too far behind: skip steps instead of displaying stale data
int stepsToSkip = (int)((timeDifference*-1) / playbackRefreshRate);
i += stepsToSkip;
} else if (timeDifference > playbackRefreshRate) {
// ahead of schedule: sleep off the surplus
try { Thread.sleep((long)(timeDifference*1000)); } catch (Exception e) { e.printStackTrace(); }
}
i++;
location = locationStartTime + (playbackStepTime) * i;
updateTimeListeners(location);
}
}
// Starts a background fetch of the next playback chunk. The result is
// handed back through preFetchChannelMap; completion is signalled via
// preFetchDone/preFetchLock and consumed by getPreFetchChannelMap.
private void preFetchData(final double location, final double duration) {
preFetchChannelMap = null;
preFetchDone = false;
new Thread(new Runnable() {
public void run() {
boolean requestStatus = false;
// only bother requesting if we are still playing by the time this
// thread runs
if (state == STATE_PLAYING) {
requestStatus = requestData(location, duration);
}
if (requestStatus) {
preFetchChannelMap = new LocalChannelMap();
try {
sink.Fetch(LOADING_TIMEOUT, preFetchChannelMap);
} catch (Exception e) {
log.error("Failed to fetch data.");
e.printStackTrace();
}
} else {
// signal failure to the consumer with a null map
preFetchChannelMap = null;
}
// wake up getPreFetchChannelMap if it is waiting
synchronized(preFetchLock) {
preFetchDone = true;
preFetchLock.notify();
}
}
}, "prefetch").start();
}
// Waits for the pre-fetch thread started by preFetchData to finish and
// returns its channel map (null if the fetch failed). Consumes the map:
// the field is cleared so each pre-fetch is delivered exactly once.
private LocalChannelMap getPreFetchChannelMap() {
synchronized(preFetchLock) {
// preFetchDone guards against a missed notify: if the fetch already
// completed we must not wait
if (!preFetchDone) {
log.debug("Waiting for pre-fetch channel map.");
try {
preFetchLock.wait();
} catch (Exception e) {
log.error("Failed to wait for channel map.");
e.printStackTrace();
}
log.debug("Done waiting for pre-fetch channel map.");
}
}
LocalChannelMap fetchedMap = preFetchChannelMap;
preFetchChannelMap = null;
return fetchedMap;
}
// Monitor Methods
// Starts monitoring the requested channels, retrying once on failure.
private boolean monitorData() {
return monitorData(true);
}
// Puts the sink into monitor (streaming real-time) mode for the requested
// channels. If the sink is already in monitor mode the connection is
// re-created first. Returns true on success (or when there is nothing to
// monitor); retries once if retry is set.
private boolean monitorData(boolean retry) {
if (requestedChannels.NumberOfChannels() == 0) {
return true;
}
if (requestIsMonitor) {
try {
reInitRBNB();
} catch (SAPIException e) {
e.printStackTrace();
return false;
}
}
log.debug("Monitoring data after location " + DataViewer.formatDate(location) + ".");
// mark the sink as being in monitor mode so plain requests reconnect
requestIsMonitor = true;
try {
sink.Monitor(requestedChannels, 5);
log.info("Monitoring selected data channels.");
} catch (SAPIException e) {
log.error("Failed to monitor channels.");
e.printStackTrace();
if (retry) {
return monitorData(false);
} else {
return false;
}
}
return true;
}
// Fetches the next batch of data from the sink and posts it to listeners.
// Used both by the monitoring loop (short timeout, timeouts are normal)
// and by loadData after a request (long timeout, timeouts are errors).
private void updateDataMonitoring() {
//stop monitoring if no channels selected
if (requestedChannels.NumberOfChannels() == 0) {
fireStatusMessage("Stopping real time. No channels are selected.");
changeStateSafe(STATE_STOPPED);
return;
}
LocalChannelMap getmap = new LocalChannelMap();
long timeout;
// short poll while monitoring so queued requests stay responsive; long
// timeout when completing an explicit load request
if (state == STATE_MONITORING) {
timeout = 500;
} else {
timeout = LOADING_TIMEOUT;
}
try {
sink.Fetch(timeout, getmap);
} catch (Exception e) {
fireErrorMessage("Failed to load data from the server. Please try again later.");
e.printStackTrace();
changeStateSafe(STATE_STOPPED);
requestIsMonitor = true;
return;
}
if (getmap.GetIfFetchTimedOut()) {
if (state == STATE_MONITORING) {
//no data was received, this is not an error and we should go on
//to see if more data is received next time around
//TODO see if we should sleep here
log.debug("Fetch timed out for monitor.");
return;
} else {
log.error("Failed to fetch data.");
fireErrorMessage("Failed to load data from the server. Please try again later.");
changeStateSafe(STATE_STOPPED);
return;
}
}
//received no data
if (getmap.NumberOfChannels() == 0) {
return;
}
//post data to listeners
channelManager.postData(getmap);
if (state == STATE_MONITORING) {
//update current location to the newest sample received, never backwards
double newLocation = getLastTime(getmap);
if (newLocation > location) {
location = newLocation;
}
updateTimeListeners(location);
}
}
// Listener Methods
// Posts the current playback location to every registered time listener.
// A misbehaving listener must not break the others, so each call is
// wrapped in its own try/catch.
// NOTE(review): index-based iteration appears intended to tolerate
// concurrent appends to the listener list — confirm before converting to
// an iterator/for-each.
private void updateTimeListeners(double location) {
  for (int index = 0; index < timeListeners.size(); index++) {
    TimeListener listener = (TimeListener) timeListeners.get(index);
    try {
      listener.postTime(location);
    } catch (Exception e) {
      log.error("Failed to post time to " + listener + ".");
      e.printStackTrace();
    }
  }
}
// Notifies every registered state listener of a state transition.
// NOTE(review): index-based iteration appears intended to tolerate
// concurrent appends — confirm before converting to an iterator.
private void notifyStateListeners(int state, int oldState) {
  for (int index = 0; index < stateListeners.size(); index++) {
    StateListener listener = (StateListener) stateListeners.get(index);
    listener.postState(state, oldState);
  }
}
// Tells every subscription listener that a channel has been subscribed.
private void fireSubscriptionNotification(String channelName) {
  for (int index = 0; index < subscriptionListeners.size(); index++) {
    SubscriptionListener listener = (SubscriptionListener) subscriptionListeners.get(index);
    listener.channelSubscribed(channelName);
  }
}
// Tells every subscription listener that a channel has been unsubscribed.
private void fireUnsubscriptionNotification(String channelName) {
  for (int index = 0; index < subscriptionListeners.size(); index++) {
    SubscriptionListener listener = (SubscriptionListener) subscriptionListeners.get(index);
    listener.channelUnsubscribed(channelName);
  }
}
// Tells every playback-rate listener about the new playback rate.
private void firePlaybackRateChanged(double playbackRate) {
  for (int index = 0; index < playbackRateListeners.size(); index++) {
    PlaybackRateListener rateListener = (PlaybackRateListener) playbackRateListeners.get(index);
    rateListener.playbackRateChanged(playbackRate);
  }
}
// Tells every time-scale listener about the new time scale.
private void fireTimeScaleChanged(double timeScale) {
  for (int index = 0; index < timeScaleChangeListeners.size(); index++) {
    TimeScaleListener scaleListener = (TimeScaleListener) timeScaleChangeListeners.get(index);
    scaleListener.timeScaleChanged(timeScale);
  }
}
// Utility Methods
// Returns true if any of the given channels has data after the given time,
// based on the start/duration reported in their metadata. Channels with no
// metadata are ignored.
private boolean moreData(String[] channels, double time) {
  double latestEndTime = 0;
  for (String name : channels) {
    Channel channel = getChannel(name);
    if (channel != null) {
      latestEndTime = Math.max(latestEndTime, channel.getStart() + channel.getDuration());
    }
  }
  return time < latestEndTime;
}
// Returns true if the channel's metadata identifies it as a JPEG video
// channel; false if the channel is unknown or has a different mime type.
private boolean isVideo(String channelName) {
  Channel channel = getChannel(channelName);
  return channel != null && "image/jpeg".equals(channel.getMetadata("mime"));
}
/**
 * Gets the timestamp of the most recent data point across all channels in
 * the map.
 *
 * @param channelMap the channel map to inspect
 * @return the latest end time, or -1 if the map contains no data points
 */
private static double getLastTime(ChannelMap channelMap) {
  double lastTime = -1;
  String[] channels = channelMap.GetChannelList();
  for (int i = 0; i < channels.length; i++) {
    String channelName = channels[i];
    int channelIndex = channelMap.GetIndex(channelName);
    double[] times = channelMap.GetTimes(channelIndex);
    if (times.length == 0) {
      continue;  // channel present but carries no samples; avoid AIOOBE
    }
    double endTime = times[times.length - 1];
    if (endTime > lastTime) {
      lastTime = endTime;
    }
  }
  return lastTime;
}
// Player Methods
/**
 * Gets the current playback state.
 *
 * @return one of the STATE_* constants
 */
public int getState() {
return state;
}
/**
 * Requests a switch to real-time monitoring. If not already monitoring,
 * the location is first moved to the current wall-clock time.
 */
public void monitor() {
if (state != STATE_MONITORING) {
setLocation(System.currentTimeMillis()/1000d);
}
setState(STATE_MONITORING);
}
/**
 * Requests a switch to playback mode.
 */
public void play() {
setState(STATE_PLAYING);
}
/**
 * Requests a switch to the stopped state.
 */
public void pause() {
setState(STATE_STOPPED);
}
/**
 * Requests shutdown of the data thread and waits up to about one second
 * (20 x 50 ms) for the server connection to close.
 */
public void exit() {
setState(STATE_EXITING);
//wait for thread to finish
int count = 0;
// sink becomes null once the data thread has closed the connection
while (sink != null && count++ < 20) {
try { Thread.sleep(50); } catch (Exception e) {}
}
}
/**
 * Queues a state-change request to be applied by the RBNB data thread.
 * Safe to call from any thread.
 *
 * @param state the desired state, one of the STATE_* constants
 */
public void setState(int state) {
  synchronized (stateChangeRequests) {
    // Integer.valueOf instead of the deprecated new Integer(...) so small
    // values come from the shared cache
    stateChangeRequests.add(Integer.valueOf(state));
  }
}
/**
 * Gets the current playback location.
 *
 * @return the location, in seconds since the epoch
 */
public double getLocation() {
return location;
}
/**
 * Queues a seek request to be applied by the RBNB data thread. Negative
 * locations are rejected with an error log and no request is queued.
 *
 * @param location the location to seek to, in seconds since the epoch
 */
public void setLocation(final double location) {
  if (location >= 0) {
    synchronized (updateLocationLock) {
      updateLocation = location;
    }
  } else {
    log.error("Location not set; location must be nonnegative.");
  }
}
/**
 * Gets the current playback rate.
 *
 * @return the playback rate, in seconds of data per second
 */
public double getPlaybackRate() {
return playbackRate;
}
/**
 * Queues a playback-rate change to be applied by the RBNB data thread.
 * Non-positive rates are rejected with an error log.
 *
 * @param playbackRate the playback rate, in seconds of data per second
 */
public void setPlaybackRate(final double playbackRate) {
  if (playbackRate > 0) {
    synchronized (updatePlaybackRateLock) {
      updatePlaybackRate = playbackRate;
    }
  } else {
    log.error("Playback rate not set; playback rate must be positive.");
  }
}
/**
 * Gets the current time scale (the width of the displayed data window).
 *
 * @return the time scale, in seconds
 */
public double getTimeScale() {
return timeScale;
}
/**
 * Queues a time-scale change to be applied by the RBNB data thread.
 * Non-positive time scales are rejected with an error log.
 *
 * @param timeScale the time scale, in seconds
 */
public void setTimeScale(double timeScale) {
  if (timeScale > 0) {
    synchronized (updateTimeScaleLock) {
      updateTimeScale = timeScale;
    }
  } else {
    log.error("Time scale not set; time scale must be positive.");
  }
}
/**
 * Queues a subscription of the listener to a single channel; the RBNB data
 * thread processes it asynchronously.
 *
 * @param channelName the channel to subscribe to
 * @param listener the data listener to post data to
 * @return always true (the request is only queued here)
 */
public boolean subscribe(String channelName, DataListener listener) {
synchronized (updateSubscriptionRequests) {
updateSubscriptionRequests.add(new SubscriptionRequest(channelName, listener, true));
}
return true;
}
/**
 * Queues a subscription to the list of <code>channels</code> with the data
 * <code>listener</code>; the RBNB data thread processes it asynchronously.
 *
 * @param channels the channels to subscribe to
 * @param listener the data listener to post data to
 */
public void subscribe(List<String> channels, DataListener listener) {
synchronized (updateSubscriptionRequests) {
updateSubscriptionRequests.add(new SubscriptionRequest(channels, listener, true));
}
}
/**
 * Queues removal of the listener's subscription to a single channel; the
 * RBNB data thread processes it asynchronously.
 *
 * @param channelName the channel to unsubscribe from
 * @param listener the data listener to unsubscribe
 * @return always true (the request is only queued here)
 */
public boolean unsubscribe(String channelName, DataListener listener) {
synchronized (updateSubscriptionRequests) {
updateSubscriptionRequests.add(new SubscriptionRequest(channelName, listener, false));
}
return true;
}
/**
 * Queues removal of the listener's subscriptions to the list of
 * <code>channels</code>; the RBNB data thread processes it asynchronously.
 *
 * @param channels the list of channels to unsubscribe from
 * @param listener the data listener to unsubscribe
 */
public void unsubscribe(List<String> channels, DataListener listener) {
synchronized (updateSubscriptionRequests) {
updateSubscriptionRequests.add(new SubscriptionRequest(channels, listener, false));
}
}
/**
 * Returns true if at least one listener is subscribed to the channel.
 *
 * @param channelName the channel to check
 * @return true if the channel has a subscription
 */
public boolean isSubscribed(String channelName) {
return channelManager.isChannelSubscribed(channelName);
}
/**
 * Returns true if there is at least one listener subscribed to a channel.
 *
 * @return true if there are channel listeners, false if there are none
 */
public boolean hasSubscribedChannels() {
return channelManager.hasSubscribedChannels();
}
/**
 * Registers a state listener. The listener is immediately sent the current
 * state (with old == new) so it can initialize itself.
 *
 * @param stateListener the listener to add
 */
public void addStateListener(StateListener stateListener) {
stateListener.postState(state, state);
stateListeners.add(stateListener);
}
/**
 * Unregisters a state listener.
 *
 * @param stateListener the listener to remove
 */
public void removeStateListener(StateListener stateListener) {
stateListeners.remove(stateListener);
}
/**
 * Registers a time listener and immediately posts the current location to
 * it so it can initialize itself.
 *
 * @param timeListener the listener to add
 */
public void addTimeListener(TimeListener timeListener) {
timeListeners.add(timeListener);
timeListener.postTime(location);
}
/**
 * Unregisters a time listener.
 *
 * @param timeListener the listener to remove
 */
public void removeTimeListener(TimeListener timeListener) {
timeListeners.remove(timeListener);
}
/**
 * Registers a playback-rate listener. The listener is immediately sent the
 * current rate so it can initialize itself.
 *
 * @param listener the listener to add
 */
public void addPlaybackRateListener(PlaybackRateListener listener) {
listener.playbackRateChanged(playbackRate);
playbackRateListeners.add(listener);
}
/**
 * Unregisters a playback-rate listener.
 *
 * @param listener the listener to remove
 */
public void removePlaybackRateListener(PlaybackRateListener listener) {
playbackRateListeners.remove(listener);
}
/**
 * Registers a time-scale listener. The listener is immediately sent the
 * current time scale so it can initialize itself.
 *
 * @param listener the listener to add
 */
public void addTimeScaleListener(TimeScaleListener listener) {
listener.timeScaleChanged(timeScale);
timeScaleChangeListeners.add(listener);
}
/**
 * Unregisters a time-scale listener.
 *
 * @param listener the listener to remove
 */
public void removeTimeScaleListener(TimeScaleListener listener) {
timeScaleChangeListeners.remove(listener);
}
// Public Methods
/**
 * Sets whether playback may skip (drop) data when rendering falls behind
 * the wall clock.
 *
 * @param dropData true to allow dropping data during playback
 */
public void dropData(boolean dropData) {
this.dropData = dropData;
}
/**
 * Gets the RBNB server host name.
 *
 * @return the host name of the RBNB server
 */
public String getRBNBHostName() {
return rbnbHostName;
}
/**
 * Sets the RBNB server host name. Takes effect on the next connection.
 *
 * @param rbnbHostName the host name of the RBNB server
 */
public void setRBNBHostName(String rbnbHostName) {
this.rbnbHostName = rbnbHostName;
}
/**
 * Gets the RBNB server port number.
 *
 * @return the port number of the RBNB server
 */
public int getRBNBPortNumber() {
return rbnbPortNumber;
}
/**
 * Sets the RBNB server port number. Takes effect on the next connection.
 *
 * @param rbnbPortNumber the port number of the RBNB server
 */
public void setRBNBPortNumber(int rbnbPortNumber) {
this.rbnbPortNumber = rbnbPortNumber;
}
/**
 * Gets the server address in "host:port" form.
 *
 * @return the connection string for the RBNB server
 */
public String getRBNBConnectionString() {
return rbnbHostName + ":" + rbnbPortNumber;
}
/**
 * Gets the name of the server. If there is no active connection, null is
 * returned.
 *
 * @return the name of the server, or null if there is no connection
 */
public String getServerName() {
  if (sink == null) {
    return null;
  }
  try {
    String serverName = sink.GetServerName();
    // the server reports its name with a leading slash; strip it off
    if (serverName.length() >= 2 && serverName.startsWith("/")) {
      serverName = serverName.substring(1);
    }
    return serverName;
  } catch (IllegalStateException e) {
    // the sink was not actually connected
    return null;
  }
}
/**
 * Returns true if a sink connection to the RBNB server currently exists.
 *
 * @return true if connected
 */
public boolean isConnected() {
return sink != null;
}
/**
 * Requests a connection to the RBNB server without blocking.
 */
public void connect() {
connect(false);
}
/**
 * Requests a connection to the RBNB server. If block is set, waits until
 * the connection succeeds or fails.
 *
 * @param block if true, wait for the connection attempt to complete
 * @return true if connected (or the request was queued when not blocking),
 *         false if a blocking attempt failed
 */
public boolean connect(boolean block) {
if (isConnected()) {
return true;
}
if (block) {
// use the calling thread itself as the wait/notify monitor
final Thread object = Thread.currentThread();
ConnectionListener listener = new ConnectionListener() {
public void connecting() {}
public void connected() {
synchronized (object) {
object.notify();
}
}
public void connectionFailed() {
// interrupt the waiting thread so wait() exits via InterruptedException
object.interrupt();
}
};
addConnectionListener(listener);
// the monitor is held before setState queues the request, so the
// notify from the data thread cannot be missed
synchronized (object) {
setState(STATE_STOPPED);
try {
object.wait();
} catch (InterruptedException e) {
// connectionFailed() interrupted us
return false;
}
}
removeConnectionListener(listener);
} else {
// leaving the disconnected state triggers the actual connection
setState(STATE_STOPPED);
}
return true;
}
/**
 * Cancel a connection attempt by interrupting the RBNB data thread, which
 * aborts the blocking socket open in progress.
 */
public void cancelConnect() {
if (rbnbThread != null) {
rbnbThread.interrupt();
}
}
/**
 * Disconnect from the RBNB server. This method will return immediately.
 */
public void disconnect() {
disconnect(false);
}
/**
 * Disconnect from the RBNB server. If block is set, this method will not
 * return until the server has disconnected.
 *
 * @param block if true, wait for the server to disconnect
 * @return true if the server disconnected
 */
public boolean disconnect(boolean block) {
if (!isConnected()) {
return true;
}
if (block) {
// use the calling thread itself as the wait/notify monitor
final Thread object = Thread.currentThread();
StateListener listener = new StateListener() {
public void postState(int newState, int oldState) {
if (newState == STATE_DISCONNECTED) {
synchronized (object) {
object.notify();
}
}
}
};
addStateListener(listener);
// the monitor is held before setState queues the request, so the
// notify from the data thread cannot be missed
synchronized (object) {
setState(STATE_DISCONNECTED);
try {
object.wait();
} catch (InterruptedException e) {
return false;
}
}
removeStateListener(listener);
} else {
setState(STATE_DISCONNECTED);
}
return true;
}
/**
 * Requests a disconnect followed by a reconnect. Both transitions are
 * queued and processed in order by the RBNB data thread.
 */
public void reconnect() {
setState(STATE_DISCONNECTED);
setState(STATE_STOPPED);
}
/**
 * Registers a listener for channel subscribe/unsubscribe notifications.
 *
 * @param subscriptionListener the listener to add
 */
public void addSubscriptionListener(SubscriptionListener subscriptionListener) {
subscriptionListeners.add(subscriptionListener);
}
/**
 * Unregisters a subscription listener.
 *
 * @param subscriptionListener the listener to remove
 */
public void removeSubscriptionListener(SubscriptionListener subscriptionListener) {
subscriptionListeners.remove(subscriptionListener);
}
//Message Methods
// Posts an error message to every registered message listener.
private void fireErrorMessage(String errorMessage) {
  for (int index = 0; index < messageListeners.size(); index++) {
    MessageListener listener = (MessageListener) messageListeners.get(index);
    listener.postError(errorMessage);
  }
}
// Posts a status message to every registered message listener.
private void fireStatusMessage(String statusMessage) {
  for (int index = 0; index < messageListeners.size(); index++) {
    MessageListener listener = (MessageListener) messageListeners.get(index);
    listener.postStatus(statusMessage);
  }
}
/**
 * Registers a listener for error and status messages.
 *
 * @param messageListener the listener to add
 */
public void addMessageListener(MessageListener messageListener) {
messageListeners.add(messageListener);
}
/**
 * Unregisters a message listener.
 *
 * @param messageListener the listener to remove
 */
public void removeMessageListener(MessageListener messageListener) {
messageListeners.remove(messageListener);
}
// Connection Listener Methods
// Tells every connection listener that a connection attempt has started.
private void fireConnecting() {
  for (int index = 0; index < connectionListeners.size(); index++) {
    ConnectionListener listener = (ConnectionListener) connectionListeners.get(index);
    listener.connecting();
  }
}
// Tells every connection listener that the server connection succeeded.
private void fireConnected() {
  for (int index = 0; index < connectionListeners.size(); index++) {
    ConnectionListener listener = (ConnectionListener) connectionListeners.get(index);
    listener.connected();
  }
}
// Tells every connection listener that the connection attempt failed.
private void fireConnectionFailed() {
  for (int index = 0; index < connectionListeners.size(); index++) {
    ConnectionListener listener = (ConnectionListener) connectionListeners.get(index);
    listener.connectionFailed();
  }
}
/**
 * Registers a listener for connection lifecycle events.
 *
 * @param connectionListener the listener to add
 */
public void addConnectionListener(ConnectionListener connectionListener) {
connectionListeners.add(connectionListener);
}
/**
 * Unregisters a connection listener.
 *
 * @param connectionListener the listener to remove
 */
public void removeConnectionListener(ConnectionListener connectionListener) {
connectionListeners.remove(connectionListener);
}
//Public Metadata Methods
/**
 * Gets the <code>MetadataManager</code> used by this controller.
 *
 * @return the metadata manager
 */
public MetadataManager getMetadataManager() {
return metadataManager;
}
/**
 * Gets the <code>Channel</code> with this <code>channelName</code>. This is
 * a convenience method for the same method in <code>MetadataManager</code>.
 *
 * @param channelName the name of the channel
 * @return the channel or null if it was not found
 * @see MetadataManager#getChannel(String)
 */
public Channel getChannel(String channelName) {
return metadataManager.getChannel(channelName);
}
/**
 * Gets a list of <code>Channel</code>'s. This is a convenience method for
 * the same method in <code>MetadataManager</code>.
 *
 * @return a list of channels
 * @see MetadataManager#getChannels()
 */
public List<Channel> getChannels() {
return metadataManager.getChannels();
}
/**
 * Gets a list of <code>Channel</code>'s with the <code>channelNames</code>.
 * This is a convenience method for the same method in
 * <code>MetadataManager</code>.
 *
 * @param channelNames the channel names to get
 * @return a list of channels
 * @see MetadataManager#getChannels(List)
 */
public List<Channel> getChannels(List<String> channelNames) {
return metadataManager.getChannels(channelNames);
}
/**
 * Gets the <code>ChannelTree</code>. This is a convenience method for the
 * same method in <code>MetadataManager</code>.
 *
 * @return the channel tree
 * @see MetadataManager#getChannelTree()
 */
public ChannelTree getChannelTree() {
return metadataManager.getChannelTree();
}
/**
 * Updates the metadata in the background. This is a convenience method for
 * the same method in <code>MetadataManager</code>.
 *
 * @see MetadataManager#updateMetadataBackground()
 */
public void updateMetadata() {
metadataManager.updateMetadataBackground();
}
//Public Marker Methods
/**
 * Gets the <code>MarkerManager</code> used by this controller.
 *
 * @return the marker manager
 */
public MarkerManager getMarkerManager() {
return markerManager;
}
//Public Static Methods
/**
 * Returns the human-readable name for a state code.
 *
 * @param state one of the STATE_* constants
 * @return the state name, or "UNKNOWN" for an unrecognized code
 */
public static String getStateName(int state) {
  switch (state) {
    case STATE_LOADING:
      return "loading";
    case STATE_PLAYING:
      return "playing";
    case STATE_MONITORING:
      return "real time";
    case STATE_STOPPED:
      return "stopped";
    case STATE_EXITING:
      return "exiting";
    case STATE_DISCONNECTED:
      return "disconnected";
    default:
      return "UNKNOWN";
  }
}
/**
 * Returns the state code for a given state name.
 *
 * @param stateName the state name
 * @return the state code, or -1 for an unrecognized name
 */
public static int getState(String stateName) {
  if (stateName.equals("loading")) {
    return STATE_LOADING;
  }
  if (stateName.equals("playing")) {
    return STATE_PLAYING;
  }
  if (stateName.equals("real time")) {
    return STATE_MONITORING;
  }
  if (stateName.equals("stopped")) {
    return STATE_STOPPED;
  }
  if (stateName.equals("exiting")) {
    return STATE_EXITING;
  }
  if (stateName.equals("disconnected")) {
    return STATE_DISCONNECTED;
  }
  return -1;
}
/**
 * An immutable request to subscribe or unsubscribe a data listener to/from
 * a set of channels, queued for processing by the RBNB data thread. Fields
 * are final because instances cross thread boundaries.
 */
class SubscriptionRequest {
  private final List<String> channelNames;
  private final DataListener listener;
  private final boolean isSubscribe;

  /**
   * Creates a request for a single channel.
   *
   * @param channelName the channel the request applies to
   * @param listener the data listener involved
   * @param isSubscribe true to subscribe, false to unsubscribe
   */
  public SubscriptionRequest(String channelName, DataListener listener, boolean isSubscribe) {
    this(Collections.singletonList(channelName), listener, isSubscribe);
  }

  /**
   * Creates a request for a list of channels.
   *
   * @param channelNames the channels the request applies to
   * @param listener the data listener involved
   * @param isSubscribe true to subscribe, false to unsubscribe
   */
  public SubscriptionRequest(List<String> channelNames, DataListener listener, boolean isSubscribe) {
    this.channelNames = channelNames;
    this.listener = listener;
    this.isSubscribe = isSubscribe;
  }

  /** @return the channels this request applies to */
  public List<String> getChannelNames() {
    return channelNames;
  }

  /** @return the data listener involved */
  public DataListener getListener() {
    return listener;
  }

  /** @return true if this is a subscribe request, false for unsubscribe */
  public boolean isSubscribe() {
    return isSubscribe;
  }
}
}
| |
/*
* Copyright (C) 2016 njacinto.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301 USA
*/
package net.nfpj.webcounter.service;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import net.nfpj.webcounter.dm.CounterDM;
import net.nfpj.webcounter.exceptions.DuplicatedCounterException;
import net.nfpj.webcounter.model.Counter;
import org.junit.Test;
import static org.junit.Assert.*;
import org.junit.Before;
import org.mockito.Mockito;
/**
*
* @author njacinto
*/
public class CounterServiceImplTest {
private CounterDM counterDM;
public CounterServiceImplTest() {
}
@Before
public void before(){
counterDM = Mockito.mock(CounterDM.class);
}
@Test
public void testCount() {
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
Mockito.when(counterDM.count()).thenReturn(1);
int result = instance.count();
assertEquals(1, result);
}
@Test
public void testCountEmpty() {
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
Mockito.when(counterDM.count()).thenReturn(0);
int result = instance.count();
assertEquals(0, result);
}
@Test(expected = NullPointerException.class)
public void testContainsNullName() {
String name = null;
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
Mockito.when(counterDM.contains(name)).thenThrow(NullPointerException.class);
boolean result = instance.contains(name);
}
@Test
public void testContainsMissing() {
String name = "C1";
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
Mockito.when(counterDM.contains(name)).thenReturn(false);
boolean result = instance.contains(name);
assertFalse(result);
}
@Test
public void testContains() {
String name = "C1";
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
Mockito.when(counterDM.contains(name)).thenReturn(true);
boolean result = instance.contains(name);
assertTrue(result);
}
/**
* Test of getAll method, of class CounterServiceImpl.
*/
@Test
public void testGetAll() {
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
List<Counter> counters = Arrays.asList(
new Counter("C1", 1), new Counter("C2", 2), new Counter("C3", 3));
Mockito.when(counterDM.getAll()).thenReturn(counters);
Iterable<Counter> result = instance.getAll();
assertNotNull(result);
Iterator<Counter> it = result.iterator();
int count = 0;
while(it.hasNext()){
Counter c = it.next();
assertTrue("Not found: "+c.toString(), counters.contains(c));
count++;
}
assertEquals(counters.size(), count);
}
@Test
public void testGetAllEmpty() {
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
Mockito.when(counterDM.getAll()).thenReturn(Collections.EMPTY_LIST);
Iterable<Counter> result = instance.getAll();
assertNotNull(result);
assertFalse(result.iterator().hasNext());
}
/**
* Test of get method, of class CounterServiceImpl.
*/
@Test
public void testGet() {
String name = "C1";
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
Counter expResult = new Counter(name, 0);
Mockito.when(counterDM.get(name)).thenReturn(expResult);
Counter result = instance.get(name);
assertEquals(expResult, result);
}
@Test
public void testGetMissing() {
String name = "C1";
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
Counter expResult = null;
Mockito.when(counterDM.get(name)).thenReturn(expResult);
Counter result = instance.get(name);
assertEquals(expResult, result);
}
@Test(expected = NullPointerException.class)
public void testGetNullName() {
String name = null;
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
Mockito.when(counterDM.get(name)).thenThrow(NullPointerException.class);
Counter result = instance.get(name);
}
/**
* Test of create method, of class CounterServiceImpl.
*/
@Test
public void testCreate() throws DuplicatedCounterException {
String name = "C1";
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
Counter expResult = new Counter(name, 0);
Mockito.when(counterDM.create(name)).thenReturn(expResult);
Counter result = instance.create(name);
assertEquals(expResult, result);
}
@Test(expected = DuplicatedCounterException.class)
public void testCreateExisting() throws DuplicatedCounterException {
String name = "C1";
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
Mockito.when(counterDM.create(name)).thenThrow(DuplicatedCounterException.class);
Counter result = instance.create(name);
}
@Test(expected = NullPointerException.class)
public void testCreateNullName() throws DuplicatedCounterException {
String name = null;
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
Mockito.when(counterDM.create(name)).thenThrow(NullPointerException.class);
Counter result = instance.create(name);
}
/**
* Test of remove method, of class CounterServiceImpl.
*/
@Test
public void testRemove() {
String name = "C1";
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
Counter expResult = new Counter(name, 0);
Mockito.when(counterDM.remove(name)).thenReturn(expResult);
Counter result = instance.remove(name);
assertEquals(expResult, result);
Mockito.when(counterDM.remove(name)).thenReturn(null);
result = instance.remove(name);
assertNull(result);
}
@Test
public void testRemoveMissing() {
String name = "C1";
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
Counter expResult = null;
Mockito.when(counterDM.remove(name)).thenReturn(expResult);
Counter result = instance.remove(name);
assertEquals(expResult, result);
}
@Test(expected = NullPointerException.class)
public void testRemoveNullName() {
String name = "";
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
Counter expResult = null;
Mockito.when(counterDM.remove(name)).thenThrow(NullPointerException.class);
Counter result = instance.remove(name);
}
/**
* Test of increment method, of class CounterServiceImpl.
*/
@Test
public void testIncrement() {
String name = "C1";
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
Counter expResult = new Counter(name, 1);
Mockito.when(counterDM.increment(name)).thenReturn(expResult);
Counter result = instance.increment(name);
assertEquals(expResult, result);
}
@Test
public void testIncrementMissing() {
String name = "C1";
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
Counter expResult = null;
Mockito.when(counterDM.increment(name)).thenReturn(expResult);
Counter result = instance.increment(name);
assertEquals(expResult, result);
}
@Test(expected = NullPointerException.class)
public void testIncrementNullName() {
String name = null;
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
Mockito.when(counterDM.increment(name)).thenThrow(NullPointerException.class);
Counter result = instance.increment(name);
}
/**
* Test of increment method, of class CounterServiceImpl.
*/
@Test
public void testDontCreateAndIncrementMissing() {
String name = "C1";
boolean createIfNotExist = false;
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
Counter expResult = null;
Mockito.when(counterDM.increment(name, createIfNotExist)).thenReturn(expResult);
Counter result = instance.increment(name, createIfNotExist);
assertEquals(expResult, result);
}
@Test
public void testDontCreateAndIncrement() {
String name = "C1";
boolean createIfNotExist = false;
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
Counter expResult = new Counter(name, 1);
Mockito.when(counterDM.increment(name, createIfNotExist)).thenReturn(expResult);
Counter result = instance.increment(name, createIfNotExist);
assertEquals(expResult, result);
}
@Test
public void testCreateAndIncrement() {
String name = "C1";
boolean createIfNotExist = true;
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
Counter expResult = new Counter(name, 1);
Mockito.when(counterDM.increment(name, createIfNotExist)).thenReturn(expResult);
Counter result = instance.increment(name, createIfNotExist);
assertEquals(expResult, result);
}
@Test(expected = NullPointerException.class)
public void testCreateAndIncrementNullName() {
String name = null;
boolean createIfNotExist = true;
CounterServiceImpl instance = new CounterServiceImpl(counterDM);
Mockito.when(counterDM.increment(name, createIfNotExist)).thenThrow(NullPointerException.class);
Counter result = instance.increment(name, createIfNotExist);
}
}
| |
package com.suse.saltstack.netapi.parser;
import com.google.gson.JsonParseException;
import com.suse.saltstack.netapi.datatypes.Arguments;
import com.suse.saltstack.netapi.datatypes.Job;
import com.suse.saltstack.netapi.datatypes.ScheduledJob;
import com.suse.saltstack.netapi.datatypes.Keys;
import com.suse.saltstack.netapi.datatypes.cherrypy.Applications;
import com.suse.saltstack.netapi.datatypes.cherrypy.HttpServer;
import com.suse.saltstack.netapi.datatypes.cherrypy.Request;
import com.suse.saltstack.netapi.datatypes.cherrypy.ServerThread;
import com.suse.saltstack.netapi.datatypes.cherrypy.Stats;
import com.suse.saltstack.netapi.results.Result;
import com.suse.saltstack.netapi.datatypes.Token;
import java.util.Date;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;
import org.junit.Test;
import java.io.InputStream;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
/**
* Json parser unit tests.
*/
/**
 * Json parser unit tests. Each test feeds a canned JSON resource into one
 * of the {@code JsonParser} instances and verifies the parsed datatypes.
 */
public class JsonParserTest {

    /** Parses a scheduled-job response and checks the job id. */
    @Test
    public void testSaltStackJobParser() throws Exception {
        InputStream is = getClass().getResourceAsStream("/minions_response.json");
        Result<List<ScheduledJob>> result = JsonParser.SCHEDULED_JOB.parse(is);
        assertNotNull("failed to parse", result);
        String jid = result.getResult().get(0).getJid();
        assertEquals("unable to parse jid", "20150211105524392307", jid);
    }

    /** Parses a plain string response (e.g. a logout acknowledgement). */
    @Test
    public void testSaltStackStringParser() throws Exception {
        InputStream is = getClass().getResourceAsStream("/logout_response.json");
        Result<String> result = JsonParser.STRING.parse(is);
        assertNotNull(result);
    }

    /** Parses a login response and verifies every token field. */
    @Test
    public void testSaltStackTokenParser() throws Exception {
        InputStream is = getClass().getResourceAsStream("/login_response.json");
        Result<List<Token>> result = JsonParser.TOKEN.parse(is);
        assertNotNull(result);
        assertEquals("user", result.getResult().get(0).getUser());
        assertEquals("auto", result.getResult().get(0).getEauth());
        String token = result.getResult().get(0).getToken();
        assertEquals("f248284b655724ca8a86bcab4b8df608ebf5b08b", token);
        assertEquals(new Date(1423573511380L), result.getResult().get(0).getStart());
        assertEquals(new Date(1423616711380L), result.getResult().get(0).getExpire());
    }

    /** A malformed date in the response must raise JsonParseException. */
    @Test(expected = JsonParseException.class)
    public void testSaltStackTokenParserWrongDate() throws Exception {
        InputStream is = getClass().getResourceAsStream("/login_response_wrong_date.json");
        JsonParser.TOKEN.parse(is);
    }

    /** Missing start/expire dates must parse as null, not fail. */
    @Test
    public void testSaltStackTokenParserDateMissing() throws Exception {
        InputStream is = getClass()
                .getResourceAsStream("/login_response_missing_date.json");
        Token token = JsonParser.TOKEN.parse(is).getResult().get(0);
        assertNull(token.getStart());
        assertNull(token.getExpire());
    }

    /** Parses the CherryPy stats response and verifies every section. */
    @Test
    public void testStatsParser() throws Exception {
        InputStream is = this.getClass().getResourceAsStream("/stats_response.json");
        Stats result = JsonParser.STATS.parse(is);
        assertNotNull(result);
        // applications section
        Applications applications = result.getApplications();
        assertNotNull(applications);
        assertEquals(47.72178888320923, applications.getUptime(), 0);
        assertEquals(1.1315585135535497, applications.getReadsPerSecond(), 0);
        assertEquals(new Date(1425821785119L), applications.getCurrentTime());
        assertEquals("3.6.0", applications.getServerVersion());
        assertEquals(0.06533312797546387, applications.getTotalTime(), 0);
        assertEquals(true, applications.isEnabled());
        assertEquals(new Date(1425821737397L), applications.getStartTime());
        assertEquals(3.918541522794187, applications.getWritesPerSecond(), 0);
        assertEquals(54, applications.getTotalBytesRead());
        assertEquals(1, applications.getCurrentRequests());
        assertEquals(2, applications.getTotalRequests());
        assertEquals(27.0, applications.getReadsPerRequest(), 0);
        assertEquals(187, applications.getTotalBytesWritten());
        assertEquals(0.04190954295959868, applications.getRequestsPerSecond(), 0);
        assertEquals(93.5, applications.getWritesPerRequest(), 0);
        // a completed request: all fields populated
        Request req1 = applications.getRequests().get("140691837540096");
        assertEquals(new Integer(54), req1.getBytesRead());
        assertEquals(new Integer(187), req1.getBytesWritten());
        assertEquals("200 OK", req1.getResponeStatus());
        assertEquals(new Date(1425821772580L), req1.getStartTime());
        assertEquals(new Date(1425821772645L), req1.getEndTime());
        assertEquals("127.0.0.1:45009", req1.getClient());
        assertEquals(0.06533312797546387, req1.getProcessingTime(), 0);
        assertEquals("POST /login HTTP/1.1", req1.getRequestLine());
        // an in-flight request: byte counts / status / end time still null
        Request req2 = applications.getRequests().get("140691829147392");
        assertEquals(null, req2.getBytesRead());
        assertEquals(null, req2.getBytesWritten());
        assertEquals(null, req2.getResponeStatus());
        assertEquals(new Date(1425821785119L), req2.getStartTime());
        assertEquals(null, req2.getEndTime());
        assertEquals("127.0.0.1:45015", req2.getClient());
        assertEquals(0.0002930164337158203, req2.getProcessingTime(), 0);
        assertEquals("GET /stats HTTP/1.1", req2.getRequestLine());
        // http server section
        HttpServer server = result.getHttpServer();
        assertNotNull(server);
        assertEquals(-1, server.getBytesRead());
        assertEquals(0.0, server.getAcceptsPerSecond(), 0);
        assertEquals(2, server.getSocketErrors());
        assertEquals(3, server.getAccepts());
        assertEquals(99, server.getThreadsIdle());
        assertEquals(false, server.isEnable());
        assertEquals("('0.0.0.0', 8000)", server.getBindAddress());
        assertEquals(4, server.getReadThroughput());
        assertEquals(5, server.getQueue());
        assertEquals(6, server.getRunTime());
        assertEquals(3, server.getThreads());
        assertEquals(7, server.getBytesWritten());
        assertEquals(8, server.getRequests());
        assertEquals(9.0, server.getWorkTime(), 0);
        assertEquals(10, server.getWriteThroughput(), 0);
        // one entry per worker thread
        for (int i = 0; i < server.getThreads(); i++) {
            ServerThread thread = server.getWorkerThreads().get("CP Server Thread-" + i);
            assertEquals(0, thread.getBytesRead());
            assertEquals(2, thread.getBytesWritten());
            assertEquals(3.4, thread.getReadThroughput(), 0);
            assertEquals(5, thread.getRequests());
            assertEquals(6, thread.getWorkTime(), 0);
            assertEquals(7.8, thread.getWriteThroughput(), 0);
        }
    }

    /** Parses a key listing and checks each key category. */
    @Test
    public void testKeysParser() throws Exception {
        InputStream is = getClass().getResourceAsStream("/keys_response.json");
        Result<Keys> result = JsonParser.KEYS.parse(is);
        // FIX: check the parse result before dereferencing it; previously a
        // null result caused an NPE instead of the intended assertion failure
        assertNotNull("failed to parse", result);
        Keys keys = result.getResult();
        assertEquals(Arrays.asList("master.pem", "master.pub"), keys.getLocal());
        assertEquals(Arrays.asList("m1"), keys.getMinions());
        assertEquals(Arrays.asList("m2"), keys.getUnacceptedMinions());
        assertEquals(Arrays.asList("m3"), keys.getRejectedMinions());
    }

    /** A job with positional args only. */
    @Test
    public void testSaltStackJobsWithArgsParser() throws Exception {
        InputStream is = this.getClass().getResourceAsStream("/jobs_response.json");
        Result<List<Map<String, Job>>> result = JsonParser.JOBS.parse(is);
        assertNotNull("failed to parse", result);
        Map<String, Job> jobs = result.getResult().get(0);
        Job job = jobs.get("20150304200110485012");
        assertNotNull(job);
        Arguments expectedArgs = new Arguments();
        expectedArgs.getArgs().add("enable-autodestruction");
        assertEquals(expectedArgs.getArgs(), job.getArguments().getArgs());
        assertEquals(expectedArgs.getKwargs(), job.getArguments().getKwargs());
        assertEquals("test.echo", job.getFunction());
        assertEquals("*", job.getTarget());
        assertEquals("glob", job.getTargetType());
        assertEquals("chuck", job.getUser());
    }

    /** A job with both positional args and keyword args. */
    @Test
    public void testSaltStackJobsWithKwargsParser() throws Exception {
        InputStream is = this.getClass().getResourceAsStream("/jobs_response_kwargs.json");
        Result<List<Map<String, Job>>> result = JsonParser.JOBS.parse(is);
        assertNotNull("failed to parse", result);
        Map<String, Job> jobs = result.getResult().get(0);
        Job job = jobs.get("20150306023815935637");
        assertNotNull(job);
        Arguments expectedArgs = new Arguments();
        expectedArgs.getArgs().add("i3");
        expectedArgs.getArgs().add(true);
        expectedArgs.getKwargs().put("sysupgrade", true);
        expectedArgs.getKwargs().put("otherkwarg", 42.5);
        assertEquals(expectedArgs.getArgs(), job.getArguments().getArgs());
        assertEquals(expectedArgs.getKwargs(), job.getArguments().getKwargs());
        assertEquals("pkg.install", job.getFunction());
        assertEquals("*", job.getTarget());
        assertEquals("glob", job.getTargetType());
        assertEquals("lucid", job.getUser());
    }

    /**
     * Map-shaped positional args whose {@code __kwarg__} flag is false/null
     * must stay positional; only {@code __kwarg__: true} promotes to kwargs.
     */
    @Test
    public void testSaltStackJobsWithArgsAsKwargsParser() throws Exception {
        InputStream is = this.getClass()
                .getResourceAsStream("/jobs_response_args_as_kwargs.json");
        Result<List<Map<String, Job>>> result = JsonParser.JOBS.parse(is);
        assertNotNull("failed to parse", result);
        Map<String, Job> jobs = result.getResult().get(0);
        Job job = jobs.get("20150315163041425361");
        assertNotNull(job);
        Arguments expectedArgs = new Arguments();
        // plain maps instead of double-brace initialization (which created an
        // anonymous LinkedHashMap subclass per map); Map.equals is unchanged
        Map<String, Object> arg = new LinkedHashMap<String, Object>();
        arg.put("refresh", true);
        expectedArgs.getArgs().add(arg);
        arg = new LinkedHashMap<String, Object>();
        arg.put("somepar", 123.3);
        arg.put("__kwarg__", false);
        expectedArgs.getArgs().add(arg);
        arg = new LinkedHashMap<String, Object>();
        arg.put("nullparam", null);
        arg.put("__kwarg__", null);
        expectedArgs.getArgs().add(arg);
        arg = new LinkedHashMap<String, Object>();
        arg.put("otherparam", true);
        arg.put("__kwarg__", 123.0);
        expectedArgs.getArgs().add(arg);
        expectedArgs.getArgs().add("i3");
        expectedArgs.getKwargs().put("sysupgrade", true);
        assertEquals(expectedArgs.getArgs(), job.getArguments().getArgs());
        assertEquals(expectedArgs.getKwargs(), job.getArguments().getKwargs());
    }

    /** Repeated kwargs keep only the last value. */
    @Test
    public void testSaltStackJobsMultipleKwargs() throws Exception {
        InputStream is = this.getClass()
                .getResourceAsStream("/jobs_response_multiple_kwarg.json");
        Result<List<Map<String, Job>>> result = JsonParser.JOBS.parse(is);
        assertNotNull("failed to parse", result);
        Map<String, Job> jobs = result.getResult().get(0);
        Job job = jobs.get("20150306023815935637");
        assertNotNull(job);
        Arguments expectedArgs = new Arguments();
        expectedArgs.getKwargs().put("multi", false);
        assertEquals(expectedArgs.getKwargs(), job.getArguments().getKwargs());
        assertEquals(0, job.getArguments().getArgs().size());
        assertEquals(1, job.getArguments().getKwargs().size());
    }
}
| |
package de.bluebiz.bluelytics.api.query.plan.transport;
import de.bluebiz.bluelytics.api.Engine;
import de.bluebiz.bluelytics.api.common.EngineConstants;
import de.bluebiz.bluelytics.api.query.plan.Operator;
import de.bluebiz.bluelytics.api.query.plan.operators.OperatorFactory;
import de.bluebiz.bluelytics.api.query.plan.options.OperatorOptionList;
import de.bluebiz.bluelytics.api.query.plan.options.OperatorOptionString;
import de.bluebiz.bluelytics.api.query.plan.source.Transport;
import de.bluebiz.bluelytics.api.query.plan.source.Wrapper;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
/**
* Factory for common used transporters like csv file, rest source or files
*/
/**
 * Factory for commonly used transporters such as files, HTTP sources
 * and MQTT brokers. Instance methods create sinks bound to this
 * factory's engine/input operator; the static {@code generic} methods
 * build a sink for arbitrary transports.
 */
public class TransporterSinkFactory {

    // operator whose output feeds the sinks created by this factory
    private final Operator inputOperator;
    // engine the sink operators are created on; never reassigned
    private final Engine engine;

    /**
     * Instantiates a new Transporter sink factory.
     *
     * @param engine the engine
     * @param inputOperator the input operator
     */
    public TransporterSinkFactory(Engine engine, Operator inputOperator) {
        this.engine = engine;
        this.inputOperator = inputOperator;
    }

    /**
     * Sink operator in the default space.
     *
     * @param <R> the type parameter
     * @param name the name
     * @return the operator
     */
    public <R> Operator<R> sink(String name){
        return sink(EngineConstants.SPACE_DEFAULT, name);
    }

    /**
     * Sink operator.
     *
     * @param <R> the type parameter
     * @param space the space
     * @param name the name
     * @return the operator
     */
    public <R> Operator<R> sink(String space, String name){
        // return the created sink directly; the intermediate local added nothing
        return OperatorFactory.createSink(engine, space, name);
    }

    /**
     * Generic transporter for a well-known transport.
     *
     * @param engine the engine
     * @param inputOperator the input operator
     * @param transport the transport
     * @param wrapper the wrapper
     * @param options the options
     * @return the transporter
     */
    public static TransporterSink generic(Engine engine, Operator inputOperator, Transport transport, Wrapper wrapper, Option... options ) {
        return generic(engine, inputOperator, transport.getName(), wrapper, options);
    }

    /**
     * Generic transporter identified by name.
     *
     * @param engine the engine
     * @param inputOperator the input operator
     * @param genericTransporter the generic transporter
     * @param wrapper the wrapper
     * @param options the options, attached as a list of name/value pairs
     * @return the transporter
     */
    public static TransporterSink generic(Engine engine, Operator inputOperator, String genericTransporter, Wrapper wrapper, Option... options) {
        Operator sender = OperatorFactory.createSender(engine, genericTransporter);
        sender.addOption("wrapper", wrapper.getName());
        // translate each Option into a (name, value) option-list entry
        if (options != null && options.length > 0) {
            List<OperatorOptionList> optionList = new ArrayList<>();
            for (Option option : options) {
                OperatorOptionList entry = new OperatorOptionList(new OperatorOptionString(option.getName()), new OperatorOptionString(option.getValue()));
                optionList.add(entry);
            }
            sender.addOption("options", optionList);
        }
        return new TransporterSink(sender, inputOperator);
    }

    // builds a bare sender with only the wrapper option set; callers add
    // transport-specific options via TransporterSink#withOption
    private static TransporterSink prepare(Engine engine, Operator inputOperator, Transport transport, Wrapper wrapper) {
        return prepare(engine, inputOperator, transport.getName(), wrapper);
    }

    private static TransporterSink prepare(Engine engine, Operator inputOperator, String genericTransporter, Wrapper wrapper) {
        Operator sender = OperatorFactory.createSender(engine,genericTransporter);
        sender.addOption("wrapper", wrapper.getName());
        return new TransporterSink(sender, inputOperator);
    }

    /**
     * File transporter.
     *
     * @param filename the filename
     * @return the transporter
     */
    public TransporterSink file(String filename) {
        return generic(engine, this.inputOperator, Transport.File, Wrapper.Push, new Option("filename", filename));
    }

    /**
     * File transporter.
     * <p>
     * If append is {@code true}, the data will be appended
     *
     * @param filename the filename
     * @param append the append
     * @return the transporter
     */
    public TransporterSink file(String filename, boolean append) {
        return generic(engine, this.inputOperator, Transport.File, Wrapper.Push, new Option("filename", filename), new Option("append", append));
    }

    /**
     * Http transporter.
     *
     * @param uri the uri
     * @return the transporter
     */
    public TransporterSink http(String uri) {
        TransporterSink tp = prepare(engine, inputOperator, Transport.HTTP, Wrapper.Pull);
        tp.withOption("uri", uri);
        return tp;
    }

    /**
     * Http transporter sink.
     *
     * @param uri the uri
     * @param delayInMillis the delay in millis
     * @return the transporter sink
     */
    public TransporterSink http(String uri, long delayInMillis) {
        TransporterSink tp = prepare(engine, inputOperator, Transport.HTTP, Wrapper.Pull);
        tp.withOption("uri", uri);
        tp.withOption("delay", delayInMillis);
        return tp;
    }

    /**
     * Creates a HTTP request for the given uri.
     * The method must be a valid HTTP method: GET, POST, PUT, DELETE or HEAD.
     *
     * @param uri the uri to fetch
     * @param method the http method
     * @return the transporter
     */
    public TransporterSink http(String uri, String method) {
        TransporterSink tp = http(uri);
        tp.withOption("method", method);
        return tp;
    }

    /**
     * Creates a HTTP request for the given uri.
     * The method must be a valid HTTP method: GET, POST, PUT, DELETE or HEAD.
     *
     * @param uri the uri to fetch
     * @param method the http method
     * @param delayInMillis the delay in millis
     * @return the transporter
     */
    public TransporterSink http(String uri, String method, long delayInMillis) {
        TransporterSink tp = http(uri);
        tp.withOption("method", method);
        tp.withOption("delay", delayInMillis);
        return tp;
    }

    /**
     * Allows to communicate with an external MQTT broker.
     * Bluelytics also has an built-in MQTT broker, see {@link #mqtt(String)}
     * <p>
     * The broker should be something like tcp://example.com:1883
     * A client ID is automatically generated.
     *
     * @param topic the topic to listen
     * @param broker the url of the broker
     * @return the transporter
     */
    public TransporterSink mqtt(String topic, String broker) {
        // random UUID keeps concurrently created clients distinct
        String clientId = "bluelytics_client_" + UUID.randomUUID().toString();
        return mqtt(topic, broker, clientId);
    }

    /**
     * Allows to communicate with an external MQTT broker.
     * Bluelytics also has an built-in MQTT broker.
     * <p>
     * The broker should be something like tcp://example.com:1883
     * A client id should be unique.
     *
     * @param topic the topic
     * @param broker the broker
     * @param clientId the client id
     * @return the transporter
     */
    public TransporterSink mqtt(String topic, String broker, String clientId) {
        TransporterSink tp = prepare(engine, inputOperator, Transport.MQTT, Wrapper.Push);
        tp.withOption("topic", topic);
        tp.withOption("broker", broker);
        tp.withOption("client_id", clientId);
        return tp;
    }

    /**
     * Creates a MQTT client that listens on the built-in MQTT broker
     *
     * @param topic the topic to listen to
     * @return transporter sink
     */
    public TransporterSink mqtt(String topic) {
        TransporterSink tp = prepare(engine, inputOperator, Transport.MQTT, Wrapper.Push);
        tp.withOption("topic", topic);
        return tp;
    }
}
| |
/**
* $Id$
* $URL$
* JdbcGeneralGenericDao.java - genericdao - Apr 26, 2008 4:33:33 PM - azeckoski
**************************************************************************
* Copyright (c) 2008 Aaron Zeckoski
* Licensed under the Apache License, Version 2
*
* A copy of the Apache License, Version 2 has been included in this
* distribution and is available at: http://www.apache.org/licenses/LICENSE-2.0.txt
*
* Aaron Zeckoski (azeckoski@gmail.com) (aaronz@vt.edu) (aaron@caret.cam.ac.uk)
*/
package org.sakaiproject.genericdao.springjdbc;
import java.io.Serializable;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.sql.DataSource;
import org.azeckoski.reflectutils.ArrayUtils;
import org.azeckoski.reflectutils.ReflectUtils;
import org.sakaiproject.genericdao.api.GeneralGenericDao;
import org.sakaiproject.genericdao.api.mappers.DataMapper;
import org.sakaiproject.genericdao.api.mappers.NamesRecord;
import org.sakaiproject.genericdao.api.mappers.StatementMapper;
import org.sakaiproject.genericdao.api.translators.DatabaseTranslator;
import org.sakaiproject.genericdao.springjdbc.translators.BasicTranslator;
import org.sakaiproject.genericdao.util.JDBCUtils;
import org.springframework.jdbc.core.BatchPreparedStatementSetter;
/**
* A spring JDBC based implementation of {@link GeneralGenericDao}
* which can be extended to add more specialized DAO methods.
* This should meet most DAO needs.
* <p>
* See the overview for installation/usage tips.
*
* @author Aaron Zeckoski (azeckoski@gmail.com)
*/
public class JdbcGeneralGenericDao extends JdbcBasicGenericDao implements GeneralGenericDao {
/**
* Default constructor - does nothing and leaves the object in an incomplete state,
* you need to at least set the following:
* {@link #setDataSource(DataSource)} <br/>
* {@link #setAutoDDL(boolean)} <br/>
* {@link #setAutoCommitDDL(boolean)} <br/>
* {@link #setDatabaseType(String)} <br/>
* {@link #setDataMappers(List)} <br/>
* <br/>
* This does not actually start the DAO, run {@link #startup()} to start it <br/>
* Note that this will be started automatically by Spring if this is created as a Spring bean,
* no actions are necessary and setting an init method is not needed
*/
public JdbcGeneralGenericDao() {
    // no-op bean constructor: all configuration (data source, database type,
    // mappers, DDL flags) is injected via setters, then startup() is run
    // (automatic when this object is created as a Spring bean)
    super();
}
/**
* Complete constructor, sets all required values for running the DAO,
* does not actually start the DAO, run {@link #startup()} to start it <br/>
* Note that this will be started automatically by Spring if this is created as a Spring bean,
* no actions are necessary and setting an init method is not needed
*
* @param dataSource the DataSource to use with this DAO
* @param threadBoundDataSource if true then the DataSource will be bound to threads and
* only unbound and closed when {@link #closeConnection()} is called,
* otherwise a new DataSource is obtained each time,
* this has no effect if the DataSource is a Spring DataSource
* @param databaseType the databaseType that this DAO is connecting to (use constants in {@link DatabaseTranslator})
* @param autoDDL if true then DDL is executed on DAO startup (can be run manually if desired)
* @param autoCommitDDL if true then commit is executed after each DDL file is executed, if false then you need a TX manager to do this for you
* @param dataMappers the data mappers which map this DAO to the tables
*/
public JdbcGeneralGenericDao(DataSource dataSource, boolean threadBoundDataSource,
        String databaseType, boolean autoDDL, boolean autoCommitDDL, DataMapper[] dataMappers) {
    // pure delegation: all configuration handling lives in the base class;
    // startup() must still be run (Spring does this automatically for beans)
    super(dataSource, threadBoundDataSource, databaseType, autoDDL, autoCommitDDL, dataMappers);
}
/**
 * Batch statement setter: converts each entity into a map of column values
 * and binds them, in {@code dataKeys} order, onto the prepared statement.
 * Must stay a non-static inner class — it uses the enclosing DAO's
 * {@code makeMapFromEntity}.
 */
protected class MyPSS implements BatchPreparedStatementSetter {

    // final fields; the previous redundant "new ArrayList" initializers
    // were immediately overwritten by the constructor and have been removed
    private final NamesRecord namesRecord;
    private final List<String> dataKeys;
    private final List<Object> entities;

    /**
     * @param dataKeys the data keys (column names)
     * @param entities the objects to batch, each will be converted to a map
     * @param namesRecord the namesRecord for the persistent classes
     */
    public MyPSS(List<String> dataKeys, List<Object> entities, NamesRecord namesRecord) {
        this.dataKeys = dataKeys;
        this.entities = entities;
        this.namesRecord = namesRecord;
    }

    /** One statement execution per entity. */
    public int getBatchSize() {
        return entities.size();
    }

    /** Binds the column values of the entity at {@code index} onto {@code ps}. */
    public void setValues(PreparedStatement ps, int index) throws SQLException {
        Object entity = entities.get(index);
        Map<String, Object> data = makeMapFromEntity(entity);
        for (int i = 0; i < dataKeys.size(); i++) {
            String column = dataKeys.get(i);
            Object value = data.get(column);
            // convert value if needed
            value = JDBCUtils.convertColumn(namesRecord, column, value);
            // JDBC parameter indices are 1-based
            ps.setObject((i+1), value);
        }
    }
}
/* (non-Javadoc)
* @see org.sakaiproject.genericdao.api.finders.AllFinder#findAll(java.lang.Class, int, int)
*/
@SuppressWarnings("unchecked")
public <T> List<T> findAll(Class<T> type, int firstResult, int maxResults) {
    checkClass(type);
    String tableName = getTableNameFromClass(type);
    // base select: every column of this type's table, no where clause
    String sql = makeSQL(getSelectTemplate(type), tableName,
            StatementMapper.SELECT, tableName + ".*", StatementMapper.WHERE, "");
    // handle limit/offset
    if (firstResult > 0 || maxResults > 0) {
        // normalize negative paging values to 0 (0 means "not limited")
        firstResult = firstResult < 0 ? 0 : firstResult;
        maxResults = maxResults < 0 ? 0 : maxResults;
        // let the database-specific translator rewrite the query for paging
        sql = getDatabaseTranslator().makeLimitQuery(sql, firstResult, maxResults, tableName);
        getSpringJdbcTemplate().setMaxRows(firstResult + maxResults); // this limit is always ok to put in
    }
    if (showSQL) {
        logInfo("SQL="+sql);
    }
    List<Map<String, Object>> rMap = getSpringJdbcTemplate().queryForList(sql);
    getSpringJdbcTemplate().setMaxRows(0); // reset this to no limit
    List<T> results = new ArrayList<T>();
    // put the results into objects
    int counter = 0;
    // SPECIAL handling for DERBY
    boolean derby = BasicTranslator.DBTYPE_DERBY.equals(getDatabaseType());
    for (Map<String, Object> data : rMap) {
        if (derby) {
            // derby has to filter results after the fact... lame yes indeed
            // skip rows before the requested offset
            if (counter < firstResult) {
                counter++;
                continue;
            }
            // stop once a full page has been collected
            if (maxResults > 0 && results.size() >= maxResults) {
                break;
            }
        }
        T entity = makeEntityFromMap(type, data);
        results.add(entity);
        counter++;
    }
    return results;
}
// OVERRIDES
/**
* MUST override this method
*/
protected <T> int baseCountAll(Class<T> type) {
    // select count(*) over the whole table for this persistent type
    final String countSql = makeSQL(getSelectTemplate(type), getTableNameFromClass(type),
            StatementMapper.SELECT, "count(*)", StatementMapper.WHERE, "");
    if (showSQL) {
        logInfo("SQL=" + countSql);
    }
    // the count is returned as a long by the template; narrow it to int
    return (int) getSpringJdbcTemplate().queryForLong(countSql);
}
/**
* MUST override this method
*/
/**
 * Inserts (entities without an id) or updates (entities with an id) the
 * given set in two JDBC batches.
 *
 * @param type the persistent class
 * @param entities the entities to save
 * @return the number of rows changed
 */
protected <T> int baseSaveSet(Class<?> type, Set<T> entities) {
    String idProp = getIdProperty(type);
    List<String> keys = new ArrayList<String>();
    NamesRecord nr = getNamesRecord(type);
    // partition: no id yet -> insert, id present -> update
    List<Object> newObjects = new ArrayList<Object>();
    List<Object> existingObjects = new ArrayList<Object>();
    for (Object object : entities) {
        Object id = ReflectUtils.getInstance().getFieldValue(object, idProp);
        if (id == null) {
            newObjects.add(object);
        } else {
            existingObjects.add(object);
        }
    }
    // build the column fragments shared by the update and insert statements
    Map<String, Class<?>> types = ReflectUtils.getInstance().getFieldTypes(type);
    StringBuilder update = new StringBuilder();
    StringBuilder columns = new StringBuilder();
    StringBuilder values = new StringBuilder();
    int counter = 0;
    for (String key : types.keySet()) {
        if (idProp.equals(key)) {
            continue; // the id column is never written directly
        }
        if (counter > 0) {
            update.append(',');
            // insert
            columns.append(',');
            values.append(',');
        }
        String column = nr.getColumnForProperty(key);
        update.append(column);
        update.append("=?");
        keys.add(column);
        // insert
        columns.append(column);
        values.append('?');
        counter++;
    }
    // make and do inserts
    int changes = 0;
    if (newObjects.size() > 0) {
        String sql = makeSQL(getInsertTemplate(type), getTableNameFromClass(type),
                StatementMapper.COLUMNS, columns.toString(), StatementMapper.VALUES, values.toString());
        if (showSQL) {
            logInfo("SQL="+sql+":\n BatchCreate="+keys);
        }
        // FIX: accumulate the affected-row counts; previously the batch
        // results were discarded and this method always returned 0
        changes += countBatchChanges(
                getSpringJdbcTemplate().batchUpdate(sql, new MyPSS(keys, newObjects, nr)));
    }
    // make and do updates
    if (existingObjects.size() > 0) {
        // the id becomes the final (where-clause) parameter
        keys.add( nr.getColumnForProperty(idProp) );
        String sql = makeSQL(getUpdateTemplate(type), getTableNameFromClass(type),
                StatementMapper.UPDATE, update.toString(), StatementMapper.WHERE, "where " + getIdColumn(type) + " = ?");
        // do the batch update
        if (showSQL) {
            logInfo("SQL="+sql+":\n BatchUpdate="+keys);
        }
        changes += countBatchChanges(
                getSpringJdbcTemplate().batchUpdate(sql, new MyPSS(keys, existingObjects, nr)));
    }
    return changes;
}

/**
 * Sums the affected-row counts from a JDBC batch execution. Drivers may
 * report Statement.SUCCESS_NO_INFO (-2) when the exact count is unknown;
 * such entries are counted as one changed row.
 */
private static int countBatchChanges(int[] rowCounts) {
    int total = 0;
    for (int rowCount : rowCounts) {
        total += (rowCount >= 0) ? rowCount : 1;
    }
    return total;
}
/**
 * Batch-deletes rows by primary key using a single "in (...)" delete.
 * Null entries in the ids array are skipped.
 * MUST override this method
 *
 * @param type the persistent class whose rows are deleted
 * @param ids the primary key values to delete (null entries are ignored)
 * @return the number of rows deleted
 */
protected <T> int baseDeleteSet(Class<T> type, Serializable[] ids) {
    // FIX: collect only the non-null ids so the bind values match the '?'
    // placeholders; previously the full array (nulls included) was bound
    // against placeholders built only for non-null ids, and a null first
    // element produced a leading comma in the "in" clause
    List<Serializable> validIds = new ArrayList<Serializable>();
    StringBuilder sb = new StringBuilder();
    for (Serializable id : ids) {
        if (id != null) {
            if (validIds.size() > 0) { sb.append(','); }
            sb.append('?');
            validIds.add(id);
        }
    }
    if (validIds.isEmpty()) {
        return 0; // nothing to delete; avoids an invalid "in ()" clause
    }
    String sql = makeSQL(getDeleteTemplate(type), getTableNameFromClass(type),
        StatementMapper.WHERE, "where " + getIdColumn(type) + " in (" + sb + ")");
    if (showSQL) {
        logInfo("SQL="+sql+":\n BatchDelete="+ArrayUtils.arrayToString(ids));
    }
    return getSpringJdbcTemplate().update(sql, validIds.toArray());
}
// COMMON CODE
/**
 * Finds all persistent objects of the given type.
 *
 * @param type the persistent class to fetch
 * @return every stored instance of the type
 */
public <T> List<T> findAll(Class<T> type) {
    // delegate to the paged variant with no offset and no limit
    return findAll(type, 0, 0);
}
/**
 * Counts all persistent objects of the given type, consulting the search
 * cache before querying the database and caching a fresh count afterwards.
 *
 * @param type the persistent class to count
 * @return the total number of stored instances
 */
public <T> int countAll(Class<T> type) {
    checkClass(type);
    // try the search cache before hitting the database
    String searchCacheName = getSearchCacheName(type);
    String cacheKey = "countAll::" + type.getName();
    if (getCacheProvider().exists(searchCacheName, cacheKey)) {
        Integer cachedCount = (Integer) getCacheProvider().get(searchCacheName, cacheKey);
        if (cachedCount != null) {
            return cachedCount.intValue();
        }
    }
    int count = baseCountAll(type);
    // cache the id results for the search
    getCacheProvider().put(searchCacheName, cacheKey, Integer.valueOf(count));
    return count;
}
/**
 * Deletes the identified entities, firing the write interceptors and
 * evicting the removed entities (and stale search results) from the caches.
 *
 * @param type the persistent class to delete from
 * @param ids primary key values of the entities to remove
 */
public <T> void deleteSet(Class<T> type, Serializable[] ids) {
    checkClass(type);
    if (ids.length > 0) {
        final String operation = "deleteSet";
        beforeWrite(operation, type, ids, null);
        int changes = baseDeleteSet(type, ids);
        afterWrite(operation, type, ids, null, changes);
        // evict each removed entity from the entity cache
        String cacheName = getCacheName(type);
        for (Serializable id : ids) {
            if (id != null) {
                getCacheProvider().remove(cacheName, id.toString());
            }
        }
        // cached search results may reference the deleted rows
        getCacheProvider().clear(getSearchCacheName(type));
    }
}
/**
 * Saves (inserts or updates) a set of entities of a single persistent type,
 * firing the write interceptors and evicting the saved entities from the
 * caches. A null or empty set is a no-op.
 *
 * @param entities the entities to save; all must share one persistent type
 */
public <T> void saveSet(Set<T> entities) {
    if (entities == null || entities.isEmpty()) {
        // FIX: log through the class logger instead of System.out for
        // consistency with the rest of this class
        logInfo("WARN: Empty list of entities for saveSet, nothing to do...");
    } else {
        Class<?> type = checkEntitySet(entities);
        String operation = "saveSet";
        beforeWrite(operation, type, null, entities.toArray());
        int changes = baseSaveSet(type, entities);
        afterWrite(operation, type, null, entities.toArray(), changes);
        // clear all saved items from the cache
        String cacheName = getCacheName(type);
        for (T t : entities) {
            Object id = baseGetIdValue(t);
            if (id != null) {
                String key = id.toString();
                getCacheProvider().remove(cacheName, key);
            }
        }
        // clear the search caches
        getCacheProvider().clear(getSearchCacheName(type));
    }
}
/**
 * Deletes a set of entities of a single persistent type by extracting their
 * id values and delegating to {@code deleteSet(Class, Serializable[])}.
 * A null or empty set is a no-op.
 *
 * @param entities the entities to delete; all must share one persistent type
 */
public <T> void deleteSet(Set<T> entities) {
    // FIX: guard against null for consistency with saveSet(Set), which
    // tolerates a null argument
    if (entities != null && entities.size() > 0) {
        Class<?> type = checkEntitySet(entities);
        // collect the non-null id values; entities without an id were never
        // persisted and have nothing to delete
        List<Object> ids = new ArrayList<Object>();
        for (T t : entities) {
            Object id = baseGetIdValue(t);
            if (id != null) {
                ids.add(id);
            }
        }
        deleteSet(type, ids.toArray(new Serializable[ids.size()]));
    }
}
/**
 * Saves several sets of entities, where each set may hold a different
 * persistent type. All sets are validated up front so that nothing is
 * written if any set is invalid.
 *
 * @param entitySets the sets of entities to save
 */
@SuppressWarnings("unchecked")
public void saveMixedSet(Set[] entitySets) {
    // validate every set before writing anything
    for (Set entitySet : entitySets) {
        checkEntitySet(entitySet);
    }
    for (Set entitySet : entitySets) {
        saveSet(entitySet);
    }
}
/**
 * Deletes several sets of entities, where each set may hold a different
 * persistent type. All sets are validated up front so that nothing is
 * removed if any set is invalid.
 *
 * @param entitySets the sets of entities to delete
 */
@SuppressWarnings("unchecked")
public void deleteMixedSet(Set[] entitySets) {
    // validate every set before deleting anything
    for (Set entitySet : entitySets) {
        checkEntitySet(entitySet);
    }
    for (Set entitySet : entitySets) {
        deleteSet(entitySet);
    }
}
/**
 * Validates the class type and the list of entities before performing
 * a batch operation (throws IllegalArgumentException)
 *
 * @param entities a Set of persistent entities, should all be of the same type
 * @return the shared persistent class of the entities (null for an empty set)
 */
protected Class<?> checkEntitySet(Set<?> entities) {
    Class<?> entityClass = null;
    for (Object entity : entities) {
        if (entityClass == null) {
            // establish the expected persistent type from the first element
            entityClass = (Class<?>) findClass(entity);
        }
        if (! checkClass(entityClass).isInstance(entity)) {
            throw new IllegalArgumentException("Entity set item " +
                entity.toString() + " is not of type: " + entityClass +
                ", the type is: " + entity.getClass() +
                " (All items must be of consistent persistent type)");
        }
    }
    return entityClass;
}
}
| |
package org.opensextant.xlayer.server;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.opensextant.data.TextInput;
import org.opensextant.extraction.Extractor;
import org.opensextant.processing.Parameters;
import org.opensextant.util.TextUtils;
import org.restlet.data.Form;
import org.restlet.ext.json.JsonRepresentation;
import org.restlet.representation.Representation;
import org.restlet.resource.ServerResource;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/**
 * Abstract Restlet resource for text-tagging endpoints. Subclasses supply the
 * concrete {@link Extractor} and the processing of a single {@link TextInput};
 * this base class parses request arguments (Form or JSON) into Xponents
 * {@link Parameters} and provides ping/status/logging helpers.
 */
public abstract class TaggerResource extends ServerResource {
    /**
     * The log. NOTE(review): never assigned in this class — subclasses are
     * expected to initialize it; the logging helpers below would NPE if it
     * stays null. Confirm against concrete subclasses.
     */
    protected Logger log = null;
    /**
     * The test mode.
     */
    protected static final boolean testMode = false;
    /**
     * The prod mode.
     */
    protected static final boolean prodMode = !testMode;
    public TaggerResource() {
        super();
    }
    // Operation name pulled from the URL route attribute in doInit().
    protected String operation = null;
    /** Request field carrying the comma-separated list of requested features. */
    public final static String FLD_FEATURES = "features";
    /** JSON response field carrying the status value. */
    public final static String FLD_STATUS = "status";
    /**
     * operational parameter.
     */
    @Override
    public void doInit() {
        operation = this.getAttribute("operation");
    }
    /**
     * Reads a string attribute from the Restlet application context.
     *
     * @param k attribute name
     * @return the attribute value, or null if absent
     */
    public String getProperty(String k) {
        return (String) this.getApplication().getContext().getAttributes().get(k);
    }
    /**
     * Ping. trivial thing for now.
     *
     * @return status
     */
    public Representation ping() {
        JSONObject ping = new JSONObject();
        ping.put(FLD_STATUS, "OK");
        ping.put("version", getProperty("version"));
        return new JsonRepresentation(ping);
    }
    /**
     * @param xid extractor id
     * @return the extractor registered under the given id
     */
    public abstract Extractor getExtractor(String xid);
    /**
     * Implement the processing of a single Input given some request parameters
     * Based on the processing
     * and the request, format response accordingly.
     *
     * @param input signal
     * @param jobParams controls
     * @return JSON or other formatted response.
     */
    public abstract Representation process(TextInput input, Parameters jobParams);
    /**
     * Get parameters for processing? None currently, but may be:
     * - lower case tagging or filtering
     * - coordinate parsing on|off
     * Get parameters for formatting. JSON, HTML, mainly.
     * Output represents filters + format.
     *
     * @param inputs arguments to RESTful request
     * @return Xponents Parameters
     */
    protected Parameters fromRequest(Form inputs) {
        Parameters job = new Parameters();
        String list = inputs.getValues(FLD_FEATURES);
        // Geotagging features default to on for form-based requests.
        job.tag_coordinates = true;
        job.tag_countries = true;
        job.tag_places = true;
        if (isNotBlank(list)) {
            HashSet<String> features = new HashSet<>(TextUtils.string2list(list.toLowerCase(), ","));
            parseParameters(job, features);
        }
        String fmt = inputs.getFirstValue("format");
        if (fmt != null) {
            job.addOutputFormat(fmt);
        }
        return job;
    }
    /**
     * Applies the feature flags from the request onto the job parameters.
     *
     * @param p job parameters to mutate
     * @param kv lower-cased feature names taken from the request
     */
    protected void parseParameters(Parameters p, Set<String> kv) {
        p.tag_coordinates = kv.contains("coordinates");
        p.tag_countries = kv.contains("countries");
        p.tag_places = kv.contains("places");
        p.tag_postal = kv.contains("postal"); /* Postal tuples which include codes */
        p.tag_codes = kv.contains("codes"); /* literal codes, 'IX', 'VQS', 'MA' */
        // "geo" is shorthand for all three geotagging features at once.
        if (kv.contains("geo")) {
            p.tag_coordinates = true;
            p.tag_countries = true;
            p.tag_places = true;
        }
        // Request tagging on demand.
        p.tag_taxons = (kv.contains("taxons") || kv.contains("orgs") || kv.contains("persons"));
        p.tag_patterns = kv.contains("patterns") || kv.contains("dates");
        p.output_filtered = kv.contains("filtered_out");
    }
    /**
     * Convenience helper to reset data.
     *
     * @param job job parameters
     */
    protected void resetParameters(Parameters job) {
        job.tag_lowercase = false;
        job.tag_coordinates = false;
        job.tag_countries = false;
        job.tag_places = false;
        job.tag_postal = false;
        job.tag_taxons = false;
        job.tag_patterns = false;
        job.output_geohash = false;
        job.output_filtered = false;
        job.addOutputFormat("json");
    }
    /**
     * @param a JSONArray
     * @return the array items as a list of strings
     */
    protected List<String> fromArray(JSONArray a) {
        ArrayList<String> strings = new ArrayList<>();
        for (Object o : a) {
            strings.add((String) o);
        }
        return strings;
    }
    /**
     * @param inputs the inputs
     * @return job parameters
     * @throws JSONException on error.
     */
    protected Parameters fromRequest(JSONObject inputs) throws JSONException {
        Parameters job = new Parameters();
        // Defaults for JSON requests differ from the Form-based defaults above:
        // coordinates are off; taxons and patterns are on.
        job.tag_coordinates = false;
        job.tag_countries = true;
        job.tag_places = true;
        /** Coordinates are not reverse geocoded by default. */
        job.resolve_localities = false;
        job.tag_taxons = true;
        job.tag_patterns = true;
        job.output_filtered = false;
        job.addOutputFormat("json");
        if (inputs.has(FLD_FEATURES)) {
            // Explicit feature list: clear the defaults, then apply the request.
            resetParameters(job);
            String list = inputs.getString(FLD_FEATURES);
            Set<String> features = new HashSet<>(TextUtils.string2list(list.toLowerCase(), ","));
            this.parseParameters(job, features);
        }
        if (inputs.has("options")) {
            String list = inputs.getString("options");
            Set<String> opts = new HashSet<>(TextUtils.string2list(list.toLowerCase(), ","));
            job.clean_input = opts.contains("clean_input");
            job.tag_lowercase = opts.contains("lowercase");
            job.resolve_localities = opts.contains("revgeo") || opts.contains("resolve_localities");
        }
        //
        // Geographic filters
        if (inputs.has("preferred_countries")) {
            job.preferredGeography.put("countries", fromArray(inputs.getJSONArray("preferred_countries")));
        }
        if (inputs.has("preferred_locations")) {
            job.preferredGeography.put("geohashes", fromArray(inputs.getJSONArray("preferred_locations")));
        }
        // Input-altering options mean the request is no longer a default run.
        if (job.clean_input || job.tag_lowercase) {
            job.isdefault = false;
        }
        return job;
    }
    /**
     * @param status status
     * @param error error msg
     * @return json formatted response
     */
    protected JsonRepresentation status(String status, String error) {
        JSONObject s = new JSONObject();
        try {
            if (error != null) {
                s.put(FLD_STATUS, status);
                s.put("error", error);
            } else {
                s.put(FLD_STATUS, status);
            }
        } catch (JSONException jsonErr) {
            error("Trivial JSON issue!!!!", jsonErr);
        }
        return new JsonRepresentation(s.toString());
    }
    /**
     * Logs an error, appending the exception message when a cause is given.
     *
     * @param msg the message
     * @param err optional cause; may be null
     */
    public void error(String msg, Exception err) {
        if (err == null) {
            log.severe(msg);
        } else {
            log.severe(msg + " ERR:" + err.getMessage());
        }
    }
    /**
     * Logs an informational message.
     *
     * @param msg the message
     */
    public void info(String msg) {
        log.info(msg);
    }
    /**
     * Logs a debug message only when debug-level logging is enabled.
     *
     * @param msg the message
     */
    public void debug(String msg) {
        if (isDebug()) {
            log.fine(msg);
        }
    }
    /**
     * @return true when the logger level is exactly FINE, FINER or FINEST
     */
    public boolean isDebug() {
        return (log.getLevel() == Level.FINE || log.getLevel() == Level.FINEST || log.getLevel() == Level.FINER);
    }
}
| |
package it.unibz.krdb.obda.owlapi3.directmapping;
/*
* #%L
* ontop-obdalib-owlapi3
* %%
* Copyright (C) 2009 - 2014 Free University of Bozen-Bolzano
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import it.unibz.krdb.obda.model.CQIE;
import it.unibz.krdb.obda.model.Function;
import it.unibz.krdb.obda.model.OBDADataFactory;
import it.unibz.krdb.obda.model.OBDADataSource;
import it.unibz.krdb.obda.model.OBDAMappingAxiom;
import it.unibz.krdb.obda.model.OBDAModel;
import it.unibz.krdb.obda.model.OBDAQuery;
import it.unibz.krdb.obda.model.Predicate.COL_TYPE;
import it.unibz.krdb.obda.model.impl.OBDADataFactoryImpl;
import it.unibz.krdb.obda.model.impl.OBDAModelImpl;
import it.unibz.krdb.obda.ontology.DataPropertyExpression;
import it.unibz.krdb.obda.ontology.OClass;
import it.unibz.krdb.obda.ontology.ObjectPropertyExpression;
import it.unibz.krdb.obda.ontology.OntologyFactory;
import it.unibz.krdb.obda.ontology.PropertyExpression;
import it.unibz.krdb.obda.ontology.impl.OntologyFactoryImpl;
import it.unibz.krdb.sql.DBMetadata;
import it.unibz.krdb.sql.DataDefinition;
import it.unibz.krdb.sql.JDBCConnectionManager;
import it.unibz.krdb.sql.TableDefinition;
import java.net.URI;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLDataProperty;
import org.semanticweb.owlapi.model.OWLDeclarationAxiom;
import org.semanticweb.owlapi.model.OWLObjectProperty;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.model.OWLOntologyStorageException;
//import com.hp.hpl.jena.iri.impl.IRIFactoryImpl;
//import it.unibz.krdb.obda.model.net.IRIFactory;
/***
 *
 * A class that provides manipulation for Direct Mapping: given a database
 * (via an {@code OBDADataSource} or pre-extracted {@code DBMetadata}) it
 * generates direct-mapping axioms and can enrich an ontology with the
 * corresponding classes and properties.
 *
 * @author Victor
 *
 */
public class DirectMappingEngine {
    // Connection manager; only set by the URI-based constructor.
    // NOTE(review): with the DBMetadata constructor, conMan stays null and the
    // methods that fetch metadata from a data source would NPE — confirm callers.
    private JDBCConnectionManager conMan = null;
    private DBMetadata metadata = null;
    // Base URI used to mint entity IRIs in the generated mappings.
    private String baseuri;
    // Running index used to generate unique mapping ids ("MAPPING-ID<n>").
    private int mapidx = 1;
    private static OntologyFactory ofac = OntologyFactoryImpl.getInstance();
    public DirectMappingEngine(String baseUri, int mapnr){
        conMan = JDBCConnectionManager.getJDBCConnectionManager();
        baseuri = baseUri;
        mapidx = mapnr + 1;
    }
    public DirectMappingEngine(DBMetadata metadata, String baseUri, int mapnr){
        this.metadata = metadata;
        baseuri = baseUri;
        mapidx = mapnr + 1;
    }
    /*
     * set the base URI used in the ontology; normalized to end with "/"
     */
    public void setBaseURI(String prefix){
        // NOTE(review): replace("#", "/") substitutes EVERY '#' in the prefix,
        // not only the trailing one — confirm that is intended.
        if(prefix.endsWith("#")){
            this.baseuri = prefix.replace("#", "/");
        }else if(prefix.endsWith("/")){
            this.baseuri = prefix;
        }else this.baseuri = prefix+"/";
    }
    /***
     * enrich the ontology according to the datasources specified in the OBDAModel
     * basically from the database structure
     *
     * @param ontology
     * @param model
     *
     * @return null
     * the ontology is updated
     *
     * @throws Exceptions
     */
    public void enrichOntology(OWLOntology ontology, OBDAModel model) throws OWLOntologyStorageException, SQLException{
        List<OBDADataSource> sourcelist = new ArrayList<OBDADataSource>();
        sourcelist = model.getSources();
        OntoExpansion oe = new OntoExpansion();
        // the expansion URI must end with "/"
        if(model.getPrefixManager().getDefaultPrefix().endsWith("/")){
            oe.setURI(model.getPrefixManager().getDefaultPrefix());
        }else{
            oe.setURI(model.getPrefixManager().getDefaultPrefix()+"/");
        }
        //For each data source, enrich into the ontology
        if (metadata == null) {
            // no pre-extracted metadata: read it from each data source
            for (int i = 0; i < sourcelist.size(); i++) {
                oe.enrichOntology(conMan.getMetaData(sourcelist.get(i)),
                        ontology);
            }
        } else
            oe.enrichOntology(this.metadata, ontology);
    }
    /***
     * enrich the ontology according to mappings used in the model
     *
     * @param manager
     * @param model
     *
     * @return a new ontology storing all classes and properties used in the mappings
     *
     * @throws Exceptions
     */
    public OWLOntology getOntology(OWLOntology ontology, OWLOntologyManager manager, OBDAModel model) throws OWLOntologyCreationException, OWLOntologyStorageException, SQLException{
        OWLDataFactory dataFactory = manager.getOWLDataFactory();
        Set<OClass> classset = model.getDeclaredClasses();
        Set<ObjectPropertyExpression> objectset = model.getDeclaredObjectProperties();
        Set<DataPropertyExpression> dataset = model.getDeclaredDataProperties();
        //Add all the classes
        for(Iterator<OClass> it = classset.iterator(); it.hasNext(); ) {
            OWLClass newclass = dataFactory.getOWLClass(IRI.create(it.next().getPredicate().getName()));
            OWLDeclarationAxiom declarationAxiom = dataFactory.getOWLDeclarationAxiom(newclass);
            manager.addAxiom(ontology,declarationAxiom );
        }
        //Add all the object properties
        for(Iterator<ObjectPropertyExpression> it = objectset.iterator(); it.hasNext();){
            OWLObjectProperty newclass = dataFactory.getOWLObjectProperty(IRI.create(it.next().getPredicate().getName().toString()));
            OWLDeclarationAxiom declarationAxiom = dataFactory.getOWLDeclarationAxiom(newclass);
            manager.addAxiom(ontology,declarationAxiom );
        }
        //Add all the data properties
        for(Iterator<DataPropertyExpression> it = dataset.iterator(); it.hasNext();){
            OWLDataProperty newclass = dataFactory.getOWLDataProperty(IRI.create(it.next().getPredicate().getName().toString()));
            OWLDeclarationAxiom declarationAxiom = dataFactory.getOWLDeclarationAxiom(newclass);
            manager.addAxiom(ontology,declarationAxiom );
        }
        return ontology;
    }
    /***
     * extract all the mappings from a datasource
     *
     * @param source
     *
     * @return a new OBDA Model containing all the extracted mappings
     * @throws Exception
     */
    public OBDAModel extractMappings(OBDADataSource source) throws Exception{
        OBDAModelImpl model = new OBDAModelImpl();
        return extractMappings(model, source);
    }
    public OBDAModel extractMappings(OBDAModel model, OBDADataSource source) throws Exception{
        insertMapping(source, model);
        return model;
    }
    /***
     * extract mappings from given datasource, and insert them into the given model
     *
     * @param source
     * @param model
     *
     * @return null
     *
     * Duplicate Exception may happen,
     * since mapping id is generated randomly and same id may occur
     * @throws Exception
     */
    public void insertMapping(OBDADataSource source, OBDAModel model) throws Exception{
        // NOTE(review): requires conMan — only the URI-based constructor sets it.
        model.addSource(source);
        insertMapping(conMan.getMetaData(source),model,source.getSourceID());
    }
    public void insertMapping(DBMetadata metadata, OBDAModel model, URI sourceUri) throws Exception{
        // fall back to the model's default prefix when no base URI was given
        if (baseuri == null || baseuri.isEmpty())
            this.baseuri = model.getPrefixManager().getDefaultPrefix();
        List<TableDefinition> tables = metadata.getTableList();
        // NOTE(review): mappingAxioms is never populated, so the addMappings
        // call below adds an empty list — looks vestigial; confirm.
        List<OBDAMappingAxiom> mappingAxioms = new ArrayList<OBDAMappingAxiom>();
        for (int i = 0; i < tables.size(); i++) {
            TableDefinition td = tables.get(i);
            model.addMappings(sourceUri, getMapping(td, metadata, baseuri));
        }
        model.addMappings(sourceUri, mappingAxioms);
        // declare every class / property referenced by the generated target queries
        for (URI uri : model.getMappings().keySet()) {
            for (OBDAMappingAxiom mapping : model.getMappings().get(uri)) {
                OBDAQuery q = mapping.getTargetQuery();
                CQIE rule = (CQIE) q;
                for (Function f : rule.getBody()) {
                    // arity 1 -> class atom; otherwise inspect the range type
                    if (f.getArity() == 1)
                        model.declareClass(ofac.createClass(f.getFunctionSymbol().getName()));
                    else if (f.getFunctionSymbol().getType(1)
                            .equals(COL_TYPE.OBJECT))
                        model.declareObjectProperty(ofac.createObjectProperty(f.getFunctionSymbol().getName()));
                    else
                        model.declareDataProperty(ofac.createDataProperty(f.getFunctionSymbol().getName()));
                }
            }
        }
    }
    /***
     * generate a mapping axiom from a table of some database
     *
     * @param table : the datadefinition from which mappings are extraced
     * @param source : datasource that the table may refer to, such as foreign keys
     *
     * @return a List of OBDAMappingAxiom-s
     * @throws Exception
     */
    public List<OBDAMappingAxiom> getMapping(DataDefinition table, OBDADataSource source) throws Exception{
        return getMapping(table,conMan.getMetaData(source),baseuri);
    }
    /***
     * generate a mapping axiom from a table of a database
     *
     * @param table : the datadefinition from which mappings are extraced
     * @param metadata : the metadata of the database required
     * @param baseUri : the base uri needed for direct mapping axiom
     *
     * @return a List of OBDAMappingAxiom-s
     * @throws Exception
     */
    public List<OBDAMappingAxiom> getMapping(DataDefinition table, DBMetadata metadata, String baseUri) throws Exception {
        OBDADataFactory dfac = OBDADataFactoryImpl.getInstance();
        DirectMappingAxiom dma=null;
        dma = new DirectMappingAxiom(baseUri, table, metadata, dfac);
        dma.setbaseuri(baseUri);
        List<OBDAMappingAxiom> axioms = new ArrayList<OBDAMappingAxiom>();
        // primary mapping for the table itself
        axioms.add(dfac.getRDBMSMappingAxiom("MAPPING-ID"+mapidx,dma.getSQL(), dma.getCQ()));
        mapidx++;
        // one additional mapping per foreign-key reference axiom
        Map<String, CQIE> refAxioms = dma.getRefAxioms();
        for (String refSQL : refAxioms.keySet()) {
            axioms.add(dfac.getRDBMSMappingAxiom("MAPPING-ID"+mapidx, refSQL, refAxioms.get(refSQL)));
            mapidx++;
        }
        return axioms;
    }
}
| |
/*******************************************************************************
* Copyright (c) 2000, 2011 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.jdt.internal.ui.wizards.buildpaths.newsourcepage;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.SubProgressMonitor;
import org.eclipse.jface.operation.IRunnableContext;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.ui.IWorkbenchSite;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.part.ISetSelectionTarget;
import org.eclipse.jdt.core.IClasspathEntry;
import org.eclipse.jdt.core.IJavaElement;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.IPackageFragmentRoot;
import org.eclipse.jdt.core.JavaModelException;
import org.eclipse.jdt.internal.corext.buildpath.BuildpathDelta;
import org.eclipse.jdt.internal.corext.buildpath.ClasspathModifier;
import org.eclipse.jdt.internal.ui.JavaPlugin;
import org.eclipse.jdt.internal.ui.wizards.NewWizardMessages;
import org.eclipse.jdt.internal.ui.wizards.buildpaths.CPListElement;
import org.eclipse.jdt.internal.ui.wizards.buildpaths.CPListElementAttribute;
//Warning: This is unused and untested code. Images and descriptions are missing too.
//SelectedElements iff enabled: IJavaProject || IPackageFragmentRoot || CPListElementAttribute
public class ResetAction extends BuildpathModifierAction {
    // Context used to run the (blocking) reset operation; defaults to the
    // workbench progress service.
    private final IRunnableContext fContext;

    public ResetAction(IWorkbenchSite site) {
        this(site, null, PlatformUI.getWorkbench().getProgressService());
    }

    public ResetAction(IRunnableContext context, ISetSelectionTarget selectionTarget) {
        this(null, selectionTarget, context);
    }

    public ResetAction(IWorkbenchSite site, ISetSelectionTarget selectionTarget, IRunnableContext context) {
        super(site, selectionTarget, BuildpathModifierAction.RESET);
        fContext= context;
        setText(NewWizardMessages.NewSourceContainerWorkbookPage_ToolBar_Reset_tooltip);
        setToolTipText(NewWizardMessages.NewSourceContainerWorkbookPage_ToolBar_Reset_tooltip);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String getDetailedDescription() {
        if (!isEnabled())
            return null;
        // Scan the selection: a mix of CPListElementAttribute and other element
        // kinds gets the generic "reset to default" description.
        Iterator<?> iterator= getSelectedElements().iterator();
        Object p= iterator.next();
        while (iterator.hasNext()) {
            Object q= iterator.next();
            if (
                    (p instanceof CPListElementAttribute && !(q instanceof CPListElementAttribute)) ||
                    (q instanceof CPListElementAttribute && !(p instanceof CPListElementAttribute))
            ) {
                return NewWizardMessages.PackageExplorerActionGroup_FormText_Default_Reset;
            }
            p= q;
        }
        // Homogeneous selection: attributes -> reset output folder,
        // java elements -> reset inclusion/exclusion filters.
        if (p instanceof CPListElementAttribute) {
            return NewWizardMessages.PackageExplorerActionGroup_FormText_SetOutputToDefault;
        } else {
            return NewWizardMessages.PackageExplorerActionGroup_FormText_ResetFilters;
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void run() {
        final IRunnableWithProgress runnable= new IRunnableWithProgress() {
            public void run(IProgressMonitor monitor) throws InvocationTargetException, InterruptedException {
                try {
                    // Derive the java project from whatever kind of element
                    // is first in the selection.
                    Object firstElement= getSelectedElements().get(0);
                    IJavaProject project= null;
                    if (firstElement instanceof IJavaProject) {
                        project= (IJavaProject)firstElement;
                    } else if (firstElement instanceof IPackageFragmentRoot) {
                        project= ((IPackageFragmentRoot)firstElement).getJavaProject();
                    } else {
                        project= ((CPListElementAttribute)firstElement).getParent().getJavaProject();
                    }
                    List<Object> result= reset(getSelectedElements(), project, monitor);
                    selectAndReveal(new StructuredSelection(result));
                } catch (CoreException e) {
                    throw new InvocationTargetException(e);
                }
            }
        };
        try {
            // run synchronously, non-cancelable
            fContext.run(false, false, runnable);
        } catch (InvocationTargetException e) {
            if (e.getCause() instanceof CoreException) {
                showExceptionDialog((CoreException)e.getCause(), ""); //$NON-NLS-1$
            } else {
                JavaPlugin.log(e);
            }
        } catch (InterruptedException e) {
            // operation was interrupted; nothing to clean up
        }
    }

    /**
     * Resets each selected element: java elements get their inclusion/exclusion
     * filters cleared, classpath attributes get their output folder reset, then
     * the modified classpath is committed.
     *
     * @param selection elements to reset (IJavaElement or CPListElementAttribute)
     * @param project the enclosing java project
     * @param monitor progress monitor; may be null
     * @return the reset elements/attributes, for reveal in the viewer
     * @throws JavaModelException on classpath access/commit failure
     */
    private List<Object> reset(List<?> selection, IJavaProject project, IProgressMonitor monitor) throws JavaModelException {
        if (monitor == null)
            monitor= new NullProgressMonitor();
        try {
            monitor.beginTask(NewWizardMessages.ClasspathModifier_Monitor_Resetting, selection.size());
            List<CPListElement> entries= ClasspathModifier.getExistingEntries(project);
            List<Object> result= new ArrayList<Object>();
            for (int i= 0; i < selection.size(); i++) {
                Object element= selection.get(i);
                if (element instanceof IJavaElement) {
                    IJavaElement javaElement= (IJavaElement) element;
                    IPackageFragmentRoot root;
                    // a project selection means its default (project-root) source folder
                    if (element instanceof IJavaProject)
                        root= project.getPackageFragmentRoot(project.getResource());
                    else
                        root= (IPackageFragmentRoot) element;
                    CPListElement entry= ClasspathModifier.getClasspathEntry(entries, root);
                    ClasspathModifier.resetFilters(javaElement, entry, project, new SubProgressMonitor(monitor, 1));
                    result.add(javaElement);
                } else {
                    CPListElement selElement= ((CPListElementAttribute) element).getParent();
                    CPListElement entry= ClasspathModifier.getClasspathEntry(entries, selElement);
                    CPListElementAttribute outputFolder= ClasspathModifier.resetOutputFolder(entry, project);
                    result.add(outputFolder);
                }
            }
            // persist the modified entries and notify listeners of the delta
            ClasspathModifier.commitClassPath(entries, project, null);
            BuildpathDelta delta= new BuildpathDelta(getToolTipText());
            delta.setNewEntries(entries.toArray(new CPListElement[entries.size()]));
            informListeners(delta);
            return result;
        } finally {
            monitor.done();
        }
    }

    @Override
    protected boolean canHandle(IStructuredSelection elements) {
        // Enabled only when the selection contains something resettable:
        // a source project with filters, a filtered root, or a non-default
        // output folder attribute.
        try {
            for (Iterator<?> iterator= elements.iterator(); iterator.hasNext();) {
                Object element= iterator.next();
                if (element instanceof IJavaProject) {
                    IJavaProject project= (IJavaProject)element;
                    if (!project.isOnClasspath(project))
                        return false;
                    IClasspathEntry entry= ClasspathModifier.getClasspathEntryFor(project.getPath(), project, IClasspathEntry.CPE_SOURCE);
                    if (entry.getInclusionPatterns().length == 0 && entry.getExclusionPatterns().length == 0)
                        return false;
                    return true;
                } else if (element instanceof IPackageFragmentRoot) {
                    if (ClasspathModifier.filtersSet((IPackageFragmentRoot)element))
                        return true;
                } else if (element instanceof CPListElementAttribute) {
                    if (!ClasspathModifier.isDefaultOutputFolder((CPListElementAttribute)element))
                        return true;
                } else {
                    return false;
                }
            }
        } catch (JavaModelException e) {
            return false;
        }
        return false;
    }
}
| |
/*
* Copyright 2018 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.newsecurity.filters;
import com.thoughtworks.go.ClearSingleton;
import com.thoughtworks.go.http.mocks.*;
import com.thoughtworks.go.server.newsecurity.SessionUtilsHelper;
import com.thoughtworks.go.server.newsecurity.models.AccessToken;
import com.thoughtworks.go.server.newsecurity.models.AnonymousCredential;
import com.thoughtworks.go.server.newsecurity.models.AuthenticationToken;
import com.thoughtworks.go.server.newsecurity.models.UsernamePassword;
import com.thoughtworks.go.server.newsecurity.providers.AnonymousAuthenticationProvider;
import com.thoughtworks.go.server.newsecurity.providers.PasswordBasedPluginAuthenticationProvider;
import com.thoughtworks.go.server.newsecurity.providers.WebBasedPluginAuthenticationProvider;
import com.thoughtworks.go.server.newsecurity.utils.SessionUtils;
import com.thoughtworks.go.server.service.SecurityService;
import com.thoughtworks.go.util.SystemEnvironment;
import com.thoughtworks.go.util.TestingClock;
import org.junit.Rule;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.migrationsupport.rules.EnableRuleMigrationSupport;
import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import java.io.IOException;
import java.util.Collections;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.*;
@EnableRuleMigrationSupport
public class AbstractReAuthenticationFilterTest {
@Rule
public final ClearSingleton clearSingleton = new ClearSingleton();
private MockHttpServletRequest request;
private MockHttpServletResponse response;
private FilterChain filterChain;
private SecurityService securityService;
private PasswordBasedPluginAuthenticationProvider passwordBasedPluginAuthenticationProvider;
private WebBasedPluginAuthenticationProvider webBasedPluginAuthenticationProvider;
private TestingClock clock;
private AbstractReAuthenticationFilter filter;
private SystemEnvironment systemEnvironment;
private AnonymousAuthenticationProvider anonymousAuthenticationProvider;
@BeforeEach
void setUp() {
    // Fresh mocks per test; re-authentication is enabled by default so the
    // filter's token-expiry logic is reachable in the tests below.
    response = new MockHttpServletResponse();
    filterChain = mock(FilterChain.class);
    securityService = mock(SecurityService.class);
    systemEnvironment = mock(SystemEnvironment.class);
    when(systemEnvironment.isReAuthenticationEnabled()).thenReturn(true);
    passwordBasedPluginAuthenticationProvider = mock(PasswordBasedPluginAuthenticationProvider.class);
    webBasedPluginAuthenticationProvider = mock(WebBasedPluginAuthenticationProvider.class);
    anonymousAuthenticationProvider = mock(AnonymousAuthenticationProvider.class);
    clock = new TestingClock();
    // Spy on an anonymous subclass with a no-op failure hook so that
    // onAuthenticationFailure invocations can be verified.
    filter = spy(new AbstractReAuthenticationFilter(securityService, systemEnvironment, clock, passwordBasedPluginAuthenticationProvider, webBasedPluginAuthenticationProvider, anonymousAuthenticationProvider) {
        @Override
        protected void onAuthenticationFailure(HttpServletRequest request,
                                               HttpServletResponse response,
                                               String errorMessage) throws IOException {
        }
    });
}
@Nested
class SecurityDisabled {
    @BeforeEach
    void setUp() {
        when(securityService.isSecurityEnabled()).thenReturn(false);
    }
    @Test
    void shouldContinueWithChain() throws ServletException, IOException {
        request = HttpRequestBuilder.GET("/")
                .build();
        final HttpSession originalSession = request.getSession(true);
        assertThat(SessionUtils.getAuthenticationToken(request)).isNull();
        filter.doFilter(request, response, filterChain);
        // With security disabled the filter is a pass-through: same session,
        // no token created, and no authentication-failure callback.
        verify(filterChain).doFilter(request, response);
        assertThat(SessionUtils.getAuthenticationToken(request)).isNull();
        MockHttpServletRequestAssert.assertThat(request)
                .hasSameSession(originalSession);
        MockHttpServletResponseAssert.assertThat(response)
                .isOk();
        verify(filter, never()).onAuthenticationFailure(any(), any(), any());
    }
}
@Nested
class SecurityEnabled {
    @BeforeEach
    void setUp() {
        when(securityService.isSecurityEnabled()).thenReturn(true);
    }

    @Test
    void shouldContinueExecutionOfFilterChainIfSessionDoesNotHaveAuthenticationToken() throws IOException, ServletException {
        // No token in the session: there is nothing to re-authenticate, so the
        // filter simply continues the chain.
        request = new MockHttpServletRequest();
        filter.doFilter(request, response, filterChain);
        verify(filterChain).doFilter(request, response);
        MockHttpServletResponseAssert.assertThat(response)
                .isOk();
        verify(filter, never()).onAuthenticationFailure(any(), any(), any());
    }

    @Test
    void shouldReAuthenticateUsernamePasswordTokenWhenItHasExpired() throws IOException, ServletException {
        request = HttpRequestBuilder.GET("/").build();
        // Token minted "now", then the clock is advanced just past the
        // configured 1-hour re-authentication interval.
        final AuthenticationToken<UsernamePassword> authenticationToken = SessionUtilsHelper.createUsernamePasswordAuthentication("bob", "p@ssw0rd", clock.currentTimeMillis());
        SessionUtilsHelper.setAuthenticationToken(request, authenticationToken);
        clock.addSeconds(3601);
        when(systemEnvironment.getReAuthenticationTimeInterval()).thenReturn(3600 * 1000L);
        // Provider hands back a fresh token; the filter must store it in the session.
        final AuthenticationToken<UsernamePassword> reAuthenticatedToken = SessionUtilsHelper.createUsernamePasswordAuthentication("bob", "p@ssw0rd", clock.currentTimeMillis());
        when(passwordBasedPluginAuthenticationProvider.reauthenticate(authenticationToken)).thenReturn(reAuthenticatedToken);
        filter.doFilter(request, response, filterChain);
        verify(filterChain).doFilter(request, response);
        // Session now holds the re-authenticated token, not the expired one.
        assertThat(authenticationToken).isNotSameAs(reAuthenticatedToken);
        assertThat(SessionUtils.getAuthenticationToken(request)).isSameAs(reAuthenticatedToken);
        MockHttpServletResponseAssert.assertThat(response)
                .isOk();
        verify(filter, never()).onAuthenticationFailure(any(), any(), any());
    }

    @Test
    void shouldReAuthenticateWebBasedTokenWhenItHasExpired() throws IOException, ServletException {
        // Same expiry scenario as above, but for a web/OAuth-style token: the
        // web-based provider must be the one consulted.
        request = HttpRequestBuilder.GET("/").build();
        final AuthenticationToken<AccessToken> authenticationToken = SessionUtilsHelper.createWebAuthentication(Collections.singletonMap("access_token", "some-token"), clock.currentTimeMillis());
        SessionUtilsHelper.setAuthenticationToken(request, authenticationToken);
        clock.addSeconds(3601);
        when(systemEnvironment.getReAuthenticationTimeInterval()).thenReturn(3600 * 1000L);
        final AuthenticationToken<AccessToken> reAuthenticatedToken = SessionUtilsHelper.createWebAuthentication(Collections.singletonMap("access_token", "some-token"), clock.currentTimeMillis());
        when(webBasedPluginAuthenticationProvider.reauthenticate(authenticationToken)).thenReturn(reAuthenticatedToken);
        filter.doFilter(request, response, filterChain);
        verify(filterChain).doFilter(request, response);
        assertThat(authenticationToken).isNotSameAs(reAuthenticatedToken);
        assertThat(SessionUtils.getAuthenticationToken(request)).isSameAs(reAuthenticatedToken);
        MockHttpServletResponseAssert.assertThat(response)
                .isOk();
        verify(filter, never()).onAuthenticationFailure(any(), any(), any());
    }

    @Test
    void shouldReAuthenticateAnonymousTokenWhenItHasExpired() throws IOException, ServletException {
        // Anonymous sessions expire and re-authenticate too, via the anonymous provider.
        request = HttpRequestBuilder.GET("/").build();
        SessionUtilsHelper.loginAsAnonymous(request);
        AuthenticationToken<AnonymousCredential> authenticationToken = (AuthenticationToken<AnonymousCredential>) SessionUtils.getAuthenticationToken(request);
        clock.addSeconds(3601);
        when(systemEnvironment.getReAuthenticationTimeInterval()).thenReturn(3600 * 1000L);
        final AuthenticationToken<AnonymousCredential> reAuthenticatedToken = SessionUtilsHelper.createAnonymousAuthentication(clock.currentTimeMillis());
        when(anonymousAuthenticationProvider.reauthenticate(authenticationToken)).thenReturn(reAuthenticatedToken);
        filter.doFilter(request, response, filterChain);
        verify(filterChain).doFilter(request, response);
        assertThat(authenticationToken).isNotSameAs(reAuthenticatedToken);
        assertThat(SessionUtils.getAuthenticationToken(request)).isSameAs(reAuthenticatedToken);
        MockHttpServletResponseAssert.assertThat(response)
                .isOk();
        verify(filter, never()).onAuthenticationFailure(any(), any(), any());
    }

    @Test
    void shouldErrorOutIfUsernamePasswordTokenReAuthenticationFails() throws IOException, ServletException {
        // Provider returning null signals failed re-authentication: the chain
        // must NOT continue and the failure hook must fire.
        request = HttpRequestBuilder.GET("/").build();
        final AuthenticationToken<UsernamePassword> authenticationToken = SessionUtilsHelper.createUsernamePasswordAuthentication("bob", "p@ssw0rd", clock.currentTimeMillis());
        SessionUtilsHelper.setAuthenticationToken(request, authenticationToken);
        clock.addSeconds(3601);
        when(systemEnvironment.getReAuthenticationTimeInterval()).thenReturn(3600 * 1000L);
        when(passwordBasedPluginAuthenticationProvider.reauthenticate(authenticationToken)).thenReturn(null);
        filter.doFilter(request, response, filterChain);
        verifyZeroInteractions(filterChain);
        verify(filter).onAuthenticationFailure(request, response, "Unable to re-authenticate user after timeout.");
    }

    @Test
    void shouldErrorOutIfWebBasedTokenReAuthenticationFails() throws IOException, ServletException {
        // Same failure contract for web-based tokens.
        request = HttpRequestBuilder.GET("/").build();
        final AuthenticationToken<AccessToken> authenticationToken = SessionUtilsHelper.createWebAuthentication(Collections.singletonMap("access_token", "some-token"), clock.currentTimeMillis());
        SessionUtilsHelper.setAuthenticationToken(request, authenticationToken);
        clock.addSeconds(3601);
        when(systemEnvironment.getReAuthenticationTimeInterval()).thenReturn(3600 * 1000L);
        when(webBasedPluginAuthenticationProvider.reauthenticate(authenticationToken)).thenReturn(null);
        filter.doFilter(request, response, filterChain);
        verifyZeroInteractions(filterChain);
        verify(filter).onAuthenticationFailure(request, response, "Unable to re-authenticate user after timeout.");
    }
}
}
| |
/*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.web.service;
import com.google.common.collect.Lists;
import com.navercorp.pinpoint.common.server.bo.SpanBo;
import com.navercorp.pinpoint.common.server.bo.SpanEventBo;
import com.navercorp.pinpoint.common.service.ServiceTypeRegistryService;
import com.navercorp.pinpoint.common.util.TransactionId;
import com.navercorp.pinpoint.web.applicationmap.ApplicationMap;
import com.navercorp.pinpoint.web.applicationmap.ApplicationMapBuilder;
import com.navercorp.pinpoint.web.applicationmap.ApplicationMapBuilderFactory;
import com.navercorp.pinpoint.web.applicationmap.ApplicationMapWithScatterData;
import com.navercorp.pinpoint.web.applicationmap.appender.histogram.DefaultNodeHistogramFactory;
import com.navercorp.pinpoint.web.applicationmap.appender.histogram.NodeHistogramFactory;
import com.navercorp.pinpoint.web.applicationmap.appender.histogram.datasource.ResponseHistogramsNodeHistogramDataSource;
import com.navercorp.pinpoint.web.applicationmap.appender.histogram.datasource.WasNodeHistogramDataSource;
import com.navercorp.pinpoint.web.applicationmap.appender.server.DefaultServerInstanceListFactory;
import com.navercorp.pinpoint.web.applicationmap.appender.server.ServerInstanceListFactory;
import com.navercorp.pinpoint.web.applicationmap.appender.server.datasource.AgentInfoServerInstanceListDataSource;
import com.navercorp.pinpoint.web.applicationmap.appender.server.datasource.ServerInstanceListDataSource;
import com.navercorp.pinpoint.web.applicationmap.link.LinkType;
import com.navercorp.pinpoint.web.dao.ApplicationTraceIndexDao;
import com.navercorp.pinpoint.web.dao.TraceDao;
import com.navercorp.pinpoint.web.filter.Filter;
import com.navercorp.pinpoint.web.scatter.ScatterData;
import com.navercorp.pinpoint.web.security.ServerMapDataFilter;
import com.navercorp.pinpoint.web.service.map.FilteredMap;
import com.navercorp.pinpoint.web.service.map.FilteredMapBuilder;
import com.navercorp.pinpoint.web.vo.Application;
import com.navercorp.pinpoint.web.vo.LimitedScanResult;
import com.navercorp.pinpoint.web.vo.LoadFactor;
import com.navercorp.pinpoint.web.vo.Range;
import com.navercorp.pinpoint.web.vo.SelectedScatterArea;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import org.springframework.util.StopWatch;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
/**
* @author netspider
* @author emeroad
* @author minwoo.jung
*/
/**
 * Default {@link FilteredMapService} implementation.
 * <p>
 * Loads full span trees for a set of transaction ids, applies a {@link Filter},
 * and builds either per-link statistics ({@link LoadFactor}) or a server map
 * ({@link ApplicationMap}), optionally decorated with scatter-chart data.
 */
@Service
public class FilteredMapServiceImpl implements FilteredMapService {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    @Autowired
    private AgentInfoService agentInfoService;

    @Autowired
    @Qualifier("hbaseTraceDaoFactory")
    private TraceDao traceDao;

    @Autowired
    private ApplicationTraceIndexDao applicationTraceIndexDao;

    @Autowired
    private ServiceTypeRegistryService registry;

    @Autowired
    private ApplicationFactory applicationFactory;

    // Optional: when absent, no authorization filtering is applied to the map.
    @Autowired(required = false)
    private ServerMapDataFilter serverMapDataFilter;

    @Autowired
    private ApplicationMapBuilderFactory applicationMapBuilderFactory;

    /** Shared marker value for the presence map in {@link #recursiveCallFilter(List)}. */
    private static final Object V = new Object();

    /**
     * Scans the application trace index within {@code range}, most recent entries first.
     */
    @Override
    public LimitedScanResult<List<TransactionId>> selectTraceIdsFromApplicationTraceIndex(String applicationName, Range range, int limit) {
        return selectTraceIdsFromApplicationTraceIndex(applicationName, range, limit, true);
    }

    /**
     * Scans the application trace index for transaction ids within {@code range}.
     *
     * @param backwardDirection {@code true} to scan from the end of the range backward
     * @throws NullPointerException if {@code applicationName} or {@code range} is null
     */
    @Override
    public LimitedScanResult<List<TransactionId>> selectTraceIdsFromApplicationTraceIndex(String applicationName, Range range, int limit, boolean backwardDirection) {
        Objects.requireNonNull(applicationName, "applicationName must not be null");
        Objects.requireNonNull(range, "range must not be null");
        if (logger.isTraceEnabled()) {
            logger.trace("scan(selectTraceIdsFromApplicationTraceIndex) {}, {}", applicationName, range);
        }
        return this.applicationTraceIndexDao.scanTraceIndex(applicationName, range, limit, backwardDirection);
    }

    /**
     * Scans the application trace index restricted to a dragged scatter-chart area.
     *
     * @throws NullPointerException if {@code applicationName} or {@code area} is null
     */
    @Override
    public LimitedScanResult<List<TransactionId>> selectTraceIdsFromApplicationTraceIndex(String applicationName, SelectedScatterArea area, int limit) {
        Objects.requireNonNull(applicationName, "applicationName must not be null");
        Objects.requireNonNull(area, "area must not be null");
        if (logger.isTraceEnabled()) {
            logger.trace("scan(selectTraceIdsFromApplicationTraceIndex) {}, {}", applicationName, area);
        }
        return this.applicationTraceIndexDao.scanTraceIndex(applicationName, area, limit);
    }

    /**
     * Computes elapsed-time/error samples for the link between {@code sourceApplication}
     * and {@code destinationApplication} over the spans accepted by {@code filter}.
     * For each matching source span, only the FIRST span event that targets the
     * destination contributes a sample.
     *
     * @throws NullPointerException if sourceApplication, destinationApplication or filter is null
     */
    @Override
    @Deprecated
    public LoadFactor linkStatistics(Range range, List<TransactionId> traceIdSet, Application sourceApplication, Application destinationApplication, Filter filter) {
        Objects.requireNonNull(sourceApplication, "sourceApplication must not be null");
        Objects.requireNonNull(destinationApplication, "destinationApplication must not be null");
        Objects.requireNonNull(filter, "filter must not be null");

        StopWatch watch = new StopWatch();
        watch.start();

        List<List<SpanBo>> originalList = this.traceDao.selectAllSpans(traceIdSet);
        List<SpanBo> filteredTransactionList = filterList(originalList, filter);

        LoadFactor statistics = new LoadFactor(range);

        // TODO need to handle these separately by node type (like fromToFilter)
        // scan transaction list
        for (SpanBo span : filteredTransactionList) {
            if (sourceApplication.equals(span.getApplicationId(), registry.findServiceType(span.getApplicationServiceType()))) {
                List<SpanEventBo> spanEventBoList = span.getSpanEventBoList();
                if (spanEventBoList == null) {
                    continue;
                }
                // find the destination call and sample its elapsed time
                for (SpanEventBo spanEventBo : spanEventBoList) {
                    if (destinationApplication.equals(spanEventBo.getDestinationId(), registry.findServiceType(spanEventBo.getServiceType()))) {
                        boolean hasException = spanEventBo.hasException();
                        // TODO : need timeslot value instead of the actual value
                        statistics.addSample(span.getStartTime() + spanEventBo.getStartElapsed(), spanEventBo.getEndElapsed(), 1, hasException);
                        break;
                    }
                }
            }
        }

        watch.stop();
        logger.info("Fetch link statistics elapsed. {}ms", watch.getLastTaskTimeMillis());
        return statistics;
    }

    /**
     * Flattens the transactions accepted by {@code filter} into a single span list.
     */
    private List<SpanBo> filterList(List<List<SpanBo>> transactionList, Filter filter) {
        final List<SpanBo> filteredResult = new ArrayList<>();
        for (List<SpanBo> transaction : transactionList) {
            if (filter.include(transaction)) {
                filteredResult.addAll(transaction);
            }
        }
        return filteredResult;
    }

    /**
     * Keeps only the transactions accepted by {@code filter}, preserving
     * transaction boundaries (formerly named {@code filterList2}).
     */
    private List<List<SpanBo>> filterTransactions(List<List<SpanBo>> transactionList, Filter filter) {
        final List<List<SpanBo>> filteredResult = new ArrayList<>();
        for (List<SpanBo> transaction : transactionList) {
            if (filter.include(transaction)) {
                filteredResult.add(transaction);
            }
        }
        return filteredResult;
    }

    /**
     * Builds the application map for a single transaction.
     *
     * @throws NullPointerException if {@code transactionId} is null
     */
    @Override
    public ApplicationMap selectApplicationMap(TransactionId transactionId, int version) {
        Objects.requireNonNull(transactionId, "transactionId must not be null");
        List<TransactionId> transactionIdList = Collections.singletonList(transactionId);
        // FIXME from,to -1
        Range range = new Range(-1, -1);
        final List<List<SpanBo>> filterList = selectFilteredSpan(transactionIdList, Filter.NONE);

        FilteredMapBuilder filteredMapBuilder = new FilteredMapBuilder(applicationFactory, registry, range, version);
        filteredMapBuilder.serverMapDataFilter(serverMapDataFilter);
        filteredMapBuilder.addTransactions(filterList);
        FilteredMap filteredMap = filteredMapBuilder.build();

        return createMap(range, filteredMap);
    }

    /**
     * Builds the application map for the given transactions together with
     * per-application scatter data grouped by {@code xGroupUnit}/{@code yGroupUnit}.
     *
     * @throws NullPointerException if {@code transactionIdList} or {@code filter} is null
     */
    @Override
    public ApplicationMap selectApplicationMapWithScatterData(List<TransactionId> transactionIdList, Range originalRange, Range scanRange, int xGroupUnit, int yGroupUnit, Filter filter, int version) {
        Objects.requireNonNull(transactionIdList, "transactionIdList must not be null");
        Objects.requireNonNull(filter, "filter must not be null");

        StopWatch watch = new StopWatch();
        watch.start();

        final List<List<SpanBo>> filterList = selectFilteredSpan(transactionIdList, filter);
        FilteredMapBuilder filteredMapBuilder = new FilteredMapBuilder(applicationFactory, registry, originalRange, version);
        filteredMapBuilder.serverMapDataFilter(serverMapDataFilter);
        filteredMapBuilder.addTransactions(filterList);
        FilteredMap filteredMap = filteredMapBuilder.build();

        ApplicationMap map = createMap(originalRange, filteredMap);

        Map<Application, ScatterData> applicationScatterData = filteredMap.getApplicationScatterData(originalRange.getFrom(), originalRange.getTo(), xGroupUnit, yGroupUnit);
        ApplicationMapWithScatterData applicationMapWithScatterData = new ApplicationMapWithScatterData(map, applicationScatterData);

        watch.stop();
        logger.debug("Select filtered application map elapsed. {}ms", watch.getTotalTimeMillis());
        return applicationMapWithScatterData;
    }

    /**
     * Loads all spans for the (de-duplicated) transaction ids and keeps only the
     * transactions accepted by {@code filter}.
     */
    private List<List<SpanBo>> selectFilteredSpan(List<TransactionId> transactionIdList, Filter filter) {
        // filters out recursive calls by looking at each objects
        // do not filter here if we change to a tree-based collision check in the future.
        final List<TransactionId> recursiveFilterList = recursiveCallFilter(transactionIdList);

        // FIXME might be better to simply traverse the List<Span> and create a process chain for execution
        final List<List<SpanBo>> originalList = this.traceDao.selectAllSpans(recursiveFilterList);

        return filterTransactions(originalList, filter);
    }

    /**
     * Assembles a detailed {@link ApplicationMap} (histograms + server info) from
     * the filtered map, applying the optional authorization filter at the end.
     */
    private ApplicationMap createMap(Range range, FilteredMap filteredMap) {
        WasNodeHistogramDataSource wasNodeHistogramDataSource = new ResponseHistogramsNodeHistogramDataSource(filteredMap.getResponseHistograms());
        NodeHistogramFactory nodeHistogramFactory = new DefaultNodeHistogramFactory(wasNodeHistogramDataSource);

        ServerInstanceListDataSource serverInstanceListDataSource = new AgentInfoServerInstanceListDataSource(agentInfoService);
        ServerInstanceListFactory serverInstanceListFactory = new DefaultServerInstanceListFactory(serverInstanceListDataSource);

        ApplicationMapBuilder applicationMapBuilder = applicationMapBuilderFactory.createApplicationMapBuilder(range);
        applicationMapBuilder.linkType(LinkType.DETAILED);
        applicationMapBuilder.includeNodeHistogram(nodeHistogramFactory);
        applicationMapBuilder.includeServerInfo(serverInstanceListFactory);
        ApplicationMap map = applicationMapBuilder.build(filteredMap.getLinkDataDuplexMap());

        if (serverMapDataFilter != null) {
            map = serverMapDataFilter.dataFiltering(map);
        }
        return map;
    }

    /**
     * Removes duplicate transaction ids (recursive/self calls produce the same id
     * more than once) while preserving encounter order. Returns the original list
     * unchanged when no duplicates are found.
     *
     * @throws NullPointerException if {@code transactionIdList} is null
     */
    private List<TransactionId> recursiveCallFilter(List<TransactionId> transactionIdList) {
        Objects.requireNonNull(transactionIdList, "transactionIdList must not be null");

        List<TransactionId> crashKey = new ArrayList<>();
        Map<TransactionId, Object> filterMap = new LinkedHashMap<>(transactionIdList.size());
        for (TransactionId transactionId : transactionIdList) {
            Object old = filterMap.put(transactionId, V);
            if (old != null) {
                crashKey.add(transactionId);
            }
        }
        if (!crashKey.isEmpty()) {
            Set<TransactionId> filteredTransactionId = filterMap.keySet();
            logger.info("transactionId crash found. original:{} filter:{} crashKey:{}", transactionIdList.size(), filteredTransactionId.size(), crashKey);
            return Lists.newArrayList(filteredTransactionId);
        }
        return transactionIdList;
    }
}
| |
/*******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.dynamicsqlrow;
import java.sql.ResultSet;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.row.RowDataUtil;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
/**
* Run dynamic SQL.
* SQL is defined in a field.
*
* @author Samatar
* @since 13-10-2008
*/
/**
 * Runs dynamic SQL: the statement text is read from an input field, executed,
 * and each result row is appended to the input row and emitted.
 *
 * When "query only on change" is enabled, consecutive rows carrying the same SQL
 * reuse the buffered result of the previous execution instead of re-querying.
 */
public class DynamicSQLRow extends BaseStep implements StepInterface
{
    private static Class<?> PKG = DynamicSQLRowMeta.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$

    private DynamicSQLRowMeta meta;
    private DynamicSQLRowData data;

    public DynamicSQLRow(StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans)
    {
        super(stepMeta, stepDataInterface, copyNr, transMeta, trans);
    }

    /**
     * Executes (or replays from buffer) the SQL carried by the incoming row and
     * emits one output row per result row.
     *
     * @param rowMeta metadata of the incoming row
     * @param rowData incoming row values; output rows are built by appending the
     *                query's return fields to a resized copy of this row
     * @throws KettleException on SQL errors, template mismatches, or putRow failures
     */
    private synchronized void lookupValues(RowMetaInterface rowMeta, Object[] rowData) throws KettleException
    {
        boolean loadFromBuffer = true;
        if (first)
        {
            first = false;
            data.outputRowMeta = rowMeta.clone();
            meta.getFields(data.outputRowMeta, getStepname(), new RowMetaInterface[] { meta.getTableFields(), }, null, this);
            // First row ever: there is no previous result to replay.
            loadFromBuffer = false;
        }
        if (log.isDetailed()) logDetailed(BaseMessages.getString(PKG, "DynamicSQLRow.Log.CheckingRow") + rowMeta.getString(rowData)); //$NON-NLS-1$

        // get dynamic SQL statement
        String sqlTemp = getInputRowMeta().getString(rowData, data.indexOfSQLField);
        String sql = null;
        if (meta.isVariableReplace()) {
            sql = environmentSubstitute(sqlTemp);
        } else {
            sql = sqlTemp;
        }

        if (log.isDebug()) logDebug(BaseMessages.getString(PKG, "DynamicSQLRow.Log.SQLStatement", sql));

        // "Query only on change": re-run the query only when the SQL text differs
        // from the previous row's; otherwise replay the buffered result rows.
        if (meta.isQueryOnlyOnChange())
        {
            if (loadFromBuffer)
            {
                if (!data.previousSQL.equals(sql)) loadFromBuffer = false;
            }
        } else {
            loadFromBuffer = false;
        }

        if (loadFromBuffer)
        {
            incrementLinesInput();
            // skipPreviousRow means the previous identical query returned nothing
            // (and no outer join), so there is nothing to replay.
            if (!data.skipPreviousRow)
            {
                RowMetaInterface addMeta = data.db.getReturnRowMeta();
                // Replay every buffered result row against the current input row.
                for (int p = 0; p < data.previousrowbuffer.size(); p++)
                {
                    Object[] getBufferRow = (Object[]) data.previousrowbuffer.get(p);
                    Object[] newRow = RowDataUtil.resizeArray(rowData, data.outputRowMeta.size());
                    // BUG FIX: the fill index must restart at the end of the input row
                    // for EVERY buffered row. Previously it was initialized once before
                    // the loop and kept advancing, overrunning the output array whenever
                    // more than one result row was buffered.
                    int newIndex = rowMeta.size();
                    for (int i = 0; i < addMeta.size(); i++)
                    {
                        newRow[newIndex++] = getBufferRow[i];
                    }
                    putRow(data.outputRowMeta, data.outputRowMeta.cloneRow(newRow));
                }
            }
        } else {
            if (meta.isQueryOnlyOnChange()) data.previousrowbuffer.clear();

            // Set the values on the prepared statement (for faster exec.)
            ResultSet rs = data.db.openQuery(sql);

            // Get a row from the database...
            Object[] add = data.db.getRow(rs);
            RowMetaInterface addMeta = data.db.getReturnRowMeta();

            // Also validate the data types to make sure we've not place an incorrect template in the dialog...
            if (add != null) {
                // Number of fields the template is supposed to add to each input row.
                int nrTemplateFields = data.outputRowMeta.size() - getInputRowMeta().size();
                if (addMeta.size() != nrTemplateFields) {
                    throw new KettleException(BaseMessages.getString(PKG, "DynamicSQLRow.Exception.IncorrectNrTemplateFields", nrTemplateFields, addMeta.size(), sql));
                }
                // Collect every type mismatch so the user sees all of them at once.
                StringBuilder typeErrors = new StringBuilder();
                for (int i = 0; i < addMeta.size(); i++) {
                    ValueMetaInterface templateValueMeta = addMeta.getValueMeta(i);
                    ValueMetaInterface outputValueMeta = data.outputRowMeta.getValueMeta(getInputRowMeta().size() + i);
                    if (templateValueMeta.getType() != outputValueMeta.getType()) {
                        if (typeErrors.length() > 0) {
                            typeErrors.append(Const.CR);
                        }
                        typeErrors.append(BaseMessages.getString(PKG, "DynamicSQLRow.Exception.TemplateReturnDataTypeError", templateValueMeta.toString(), outputValueMeta.toString()));
                    }
                }
                if (typeErrors.length() > 0) {
                    throw new KettleException(typeErrors.toString());
                }
            }
            incrementLinesInput();
            int counter = 0;
            while (add != null && (meta.getRowLimit() == 0 || counter < meta.getRowLimit()))
            {
                counter++;

                Object[] newRow = RowDataUtil.resizeArray(rowData, data.outputRowMeta.size());
                int newIndex = rowMeta.size();
                for (int i = 0; i < addMeta.size(); i++) {
                    newRow[newIndex++] = add[i];
                }
                // we have to clone, otherwise we only get the last new value
                putRow(data.outputRowMeta, data.outputRowMeta.cloneRow(newRow));

                if (meta.isQueryOnlyOnChange())
                {
                    // add row to the previous rows buffer
                    data.previousrowbuffer.add(add);
                    data.skipPreviousRow = false;
                }

                if (log.isRowLevel()) logRowlevel(BaseMessages.getString(PKG, "DynamicSQLRow.Log.PutoutRow") + data.outputRowMeta.getString(newRow)); //$NON-NLS-1$

                // Only fetch the next row when it could still be emitted within the limit.
                if (meta.getRowLimit() == 0 || counter < meta.getRowLimit())
                {
                    add = data.db.getRow(rs);
                    incrementLinesInput();
                }
            }

            // Nothing found? Perhaps we have to put something out after all?
            if (counter == 0 && meta.isOuterJoin())
            {
                if (data.notfound == null)
                {
                    // Lazily build a row of nulls matching the query's return layout.
                    data.notfound = new Object[data.db.getReturnRowMeta().size()];
                }
                Object[] newRow = RowDataUtil.resizeArray(rowData, data.outputRowMeta.size());
                int newIndex = rowMeta.size();
                for (int i = 0; i < data.notfound.length; i++) {
                    newRow[newIndex++] = data.notfound[i];
                }
                putRow(data.outputRowMeta, newRow);

                if (meta.isQueryOnlyOnChange())
                {
                    // add row to the previous rows buffer
                    data.previousrowbuffer.add(data.notfound);
                    data.skipPreviousRow = false;
                }
            } else {
                // Empty result and no outer join: remember to emit nothing when
                // the same SQL shows up on the next row.
                if (meta.isQueryOnlyOnChange() && counter == 0 && !meta.isOuterJoin())
                {
                    data.skipPreviousRow = true;
                }
            }
            if (data.db != null) data.db.closeQuery(rs);
        }
        // Save current parameters value as previous ones
        if (meta.isQueryOnlyOnChange())
        {
            data.previousSQL = sql;
        }
    }

    /**
     * Processes one input row; returns false when the input is exhausted.
     * Errors are routed to the error stream when error handling is configured,
     * otherwise the transformation is stopped.
     */
    public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException
    {
        meta = (DynamicSQLRowMeta) smi;
        data = (DynamicSQLRowData) sdi;

        Object[] r = getRow(); // Get row from input rowset & set row busy!
        if (r == null) // no more input to be expected...
        {
            setOutputDone();
            return false;
        }
        if (first)
        {
            if (Const.isEmpty(meta.getSQLFieldName()))
                throw new KettleException(BaseMessages.getString(PKG, "DynamicSQLRow.Exception.SQLFieldNameEmpty"));

            if (Const.isEmpty(meta.getSql()))
                throw new KettleException(BaseMessages.getString(PKG, "DynamicSQLRow.Exception.SQLEmpty"));

            // cache the position of the SQL field (only resolved once)
            if (data.indexOfSQLField < 0)
            {
                data.indexOfSQLField = getInputRowMeta().indexOfValue(meta.getSQLFieldName());
                if (data.indexOfSQLField < 0)
                {
                    // The field is unreachable !
                    throw new KettleException(BaseMessages.getString(PKG, "DynamicSQLRow.Exception.FieldNotFound", meta.getSQLFieldName())); //$NON-NLS-1$ //$NON-NLS-2$
                }
            }
        }
        try
        {
            lookupValues(getInputRowMeta(), r);

            if (checkFeedback(getLinesRead()))
            {
                if (log.isDetailed()) logDetailed(BaseMessages.getString(PKG, "DynamicSQLRow.Log.LineNumber") + getLinesRead()); //$NON-NLS-1$
            }
        }
        catch (KettleException e)
        {
            boolean sendToErrorRow = false;
            String errorMessage = null;

            if (getStepMeta().isDoingErrorHandling())
            {
                sendToErrorRow = true;
                errorMessage = e.toString();
            }
            else
            {
                logError(BaseMessages.getString(PKG, "DynamicSQLRow.Log.ErrorInStepRunning") + e.getMessage()); //$NON-NLS-1$
                setErrors(1);
                stopAll();
                setOutputDone(); // signal end to receiver(s)
                return false;
            }
            if (sendToErrorRow)
            {
                // Simply add this row to the error row
                putError(getInputRowMeta(), r, 1, errorMessage, null, "DynamicSQLRow001");
            }
        }
        return true;
    }

    /** Stop the running query */
    public void stopRunning(StepMetaInterface smi, StepDataInterface sdi) throws KettleException
    {
        meta = (DynamicSQLRowMeta) smi;
        data = (DynamicSQLRowData) sdi;

        // Cancel at most once; the DB handle is shared with the processing thread.
        if (data.db != null && !data.isCanceled)
        {
            synchronized (data.db) {
                data.db.cancelQuery();
            }
            setStopped(true);
            data.isCanceled = true;
        }
    }

    /**
     * Opens the database connection (shared-transaction aware) and disables
     * auto-commit. Returns false when the connection metadata is missing or
     * the connection fails.
     */
    public boolean init(StepMetaInterface smi, StepDataInterface sdi)
    {
        meta = (DynamicSQLRowMeta) smi;
        data = (DynamicSQLRowData) sdi;

        if (super.init(smi, sdi))
        {
            if (meta.getDatabaseMeta() == null) {
                // NOTE(review): the "Dynmaic" typo in this message key is kept as-is —
                // it must match the key in the messages bundle; verify before "fixing".
                logError(BaseMessages.getString(PKG, "DynmaicSQLRow.Init.ConnectionMissing", getStepname()));
                return false;
            }
            data.db = new Database(this, meta.getDatabaseMeta());
            data.db.shareVariablesWith(this);
            try
            {
                if (getTransMeta().isUsingUniqueConnections())
                {
                    // Unique-connection mode: connect under the transformation's
                    // transaction, serialized on the Trans object.
                    synchronized (getTrans()) { data.db.connect(getTrans().getTransactionId(), getPartitionID()); }
                }
                else
                {
                    data.db.connect(getPartitionID());
                }

                data.db.setCommit(100); // we never get a commit, but it just turns off auto-commit.

                if (log.isDetailed()) logDetailed(BaseMessages.getString(PKG, "DynamicSQLRow.Log.ConnectedToDB")); //$NON-NLS-1$

                data.db.setQueryLimit(meta.getRowLimit());
                return true;
            }
            catch (KettleException e)
            {
                logError(BaseMessages.getString(PKG, "DynamicSQLRow.Log.DatabaseError") + e.getMessage()); //$NON-NLS-1$
                if (data.db != null) data.db.disconnect();
            }
        }
        return false;
    }

    /** Releases the database connection. */
    public void dispose(StepMetaInterface smi, StepDataInterface sdi)
    {
        meta = (DynamicSQLRowMeta) smi;
        data = (DynamicSQLRowData) sdi;

        if (data.db != null) data.db.disconnect();

        super.dispose(smi, sdi);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql.calcite.rule;
import com.google.common.collect.ImmutableList;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.util.DateString;
import org.apache.calcite.util.TimeString;
import org.apache.calcite.util.TimestampString;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.sql.calcite.planner.DruidTypeSystem;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.planner.UnsupportedSQLQueryException;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.runners.Enclosed;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.mockito.ArgumentMatchers;
import org.mockito.Mockito;
import java.lang.reflect.Field;
import java.math.BigDecimal;
@RunWith(Enclosed.class)
public class DruidLogicalValuesRuleTest
{
private static final PlannerContext DEFAULT_CONTEXT = Mockito.mock(PlannerContext.class);
@RunWith(Parameterized.class)
public static class GetValueFromLiteralSimpleTypesTest
{
    /**
     * Each case: raw literal value, its SQL type, and the Java class Druid is
     * expected to produce — character types stay String, floating/decimal types
     * map to Double, integral types map to Long.
     */
    @Parameters(name = "{1}, {2}")
    public static Iterable<Object[]> constructorFeeder()
    {
        return ImmutableList.of(
            new Object[]{"test", SqlTypeName.CHAR, String.class},
            new Object[]{"test", SqlTypeName.VARCHAR, String.class},
            new Object[]{0.1, SqlTypeName.DOUBLE, Double.class},
            new Object[]{0.1, SqlTypeName.REAL, Double.class},
            new Object[]{0.1, SqlTypeName.DECIMAL, Double.class},
            new Object[]{1L, SqlTypeName.TINYINT, Long.class},
            new Object[]{1L, SqlTypeName.SMALLINT, Long.class},
            new Object[]{1L, SqlTypeName.INTEGER, Long.class},
            new Object[]{1L, SqlTypeName.BIGINT, Long.class}
        );
    }

    // Raw value carried by the literal for the current parameterized run.
    private final Comparable<?> val;
    // SQL type the mocked literal reports.
    private final SqlTypeName sqlTypeName;
    // Java class expected from getValueFromLiteral().
    private final Class<?> javaType;

    public GetValueFromLiteralSimpleTypesTest(Comparable<?> val, SqlTypeName sqlTypeName, Class<?> javaType)
    {
        this.val = val;
        this.sqlTypeName = sqlTypeName;
        this.javaType = javaType;
    }

    @Test
    public void testGetValueFromLiteral()
    {
        final RexLiteral literal = makeLiteral(val, sqlTypeName, javaType);
        final Object fromLiteral = DruidLogicalValuesRule.getValueFromLiteral(literal, DEFAULT_CONTEXT);
        // Both the concrete class and the value must round-trip.
        Assert.assertSame(javaType, fromLiteral.getClass());
        Assert.assertEquals(val, fromLiteral);
        // The rule must consult the literal's type exactly once.
        Mockito.verify(literal, Mockito.times(1)).getType();
    }

    /**
     * Builds a Mockito-mocked RexLiteral carrying {@code val}.
     *
     * The private "value" field is injected reflectively on the mock class's
     * superclass — for a Mockito-generated mock that is presumably the real
     * RexLiteral class. NOTE(review): this relies on Calcite's internal field
     * name "value" and may break on a Calcite upgrade — confirm when bumping.
     */
    private static RexLiteral makeLiteral(Comparable<?> val, SqlTypeName typeName, Class<?> javaType)
    {
        RelDataType dataType = Mockito.mock(RelDataType.class);
        Mockito.when(dataType.getSqlTypeName()).thenReturn(typeName);
        RexLiteral literal = Mockito.mock(RexLiteral.class);
        try {
            Field field = literal.getClass().getSuperclass().getDeclaredField("value");
            field.setAccessible(true);
            field.set(literal, val);
        }
        catch (Exception e) {
            Assert.fail("Unable to mock the literal for test.\nException: " + e);
        }
        Mockito.when(literal.getType()).thenReturn(dataType);
        Mockito.when(literal.getValueAs(ArgumentMatchers.any())).thenReturn(javaType.cast(val));
        return literal;
    }
}
/**
 * Tests for {@code DruidLogicalValuesRule.getValueFromLiteral} covering literal
 * kinds outside the simple parameterized cases: booleans, timestamp/date
 * literals (interpreted in a fixed, non-UTC time zone), unsupported time
 * types (which must raise {@code UnsupportedSQLQueryException}), and numeric
 * literals cast to a different numeric type.
 */
public static class GetValueFromLiteralOtherTypesTest
{
  private static final PlannerContext DEFAULT_CONTEXT = Mockito.mock(PlannerContext.class);
  private static final DateTimeZone TIME_ZONE = DateTimes.inferTzFromString("Asia/Seoul");
  private static final RelDataTypeFactory TYPE_FACTORY = new SqlTypeFactoryImpl(DruidTypeSystem.INSTANCE);
  private static final RexBuilder REX_BUILDER = new RexBuilder(TYPE_FACTORY);

  @Rule
  public ExpectedException expectedException = ExpectedException.none();

  @BeforeClass
  public static void setup()
  {
    // All date/time conversions below must be interpreted in this fixed zone.
    Mockito.when(DEFAULT_CONTEXT.getTimeZone()).thenReturn(TIME_ZONE);
  }

  /** TRUE is extracted as the Long 1. */
  @Test
  public void testGetValueFromTrueLiteral()
  {
    RexLiteral literal = REX_BUILDER.makeLiteral(true);
    final Object fromLiteral = DruidLogicalValuesRule.getValueFromLiteral(literal, DEFAULT_CONTEXT);
    Assert.assertSame(Long.class, fromLiteral.getClass());
    Assert.assertEquals(1L, fromLiteral);
  }

  /** FALSE is extracted as the Long 0. */
  @Test
  public void testGetValueFromFalseLiteral()
  {
    RexLiteral literal = REX_BUILDER.makeLiteral(false);
    final Object fromLiteral = DruidLogicalValuesRule.getValueFromLiteral(literal, DEFAULT_CONTEXT);
    Assert.assertSame(Long.class, fromLiteral.getClass());
    Assert.assertEquals(0L, fromLiteral);
  }

  /** TIMESTAMP literals become epoch millis in the context's time zone. */
  @Test
  public void testGetValueFromTimestampLiteral()
  {
    RexLiteral literal = REX_BUILDER.makeTimestampLiteral(new TimestampString("2021-04-01 16:54:31"), 0);
    final Object fromLiteral = DruidLogicalValuesRule.getValueFromLiteral(literal, DEFAULT_CONTEXT);
    Assert.assertSame(Long.class, fromLiteral.getClass());
    Assert.assertEquals(new DateTime("2021-04-01T16:54:31", TIME_ZONE).getMillis(), fromLiteral);
  }

  /** DATE literals become epoch millis of midnight in the context's time zone. */
  @Test
  public void testGetValueFromDateLiteral()
  {
    RexLiteral literal = REX_BUILDER.makeDateLiteral(new DateString("2021-04-01"));
    final Object fromLiteral = DruidLogicalValuesRule.getValueFromLiteral(literal, DEFAULT_CONTEXT);
    Assert.assertSame(Long.class, fromLiteral.getClass());
    Assert.assertEquals(new DateTime("2021-04-01", TIME_ZONE).getMillis(), fromLiteral);
  }

  /** TIMESTAMP WITH LOCAL TIME ZONE is rejected with a dedicated error. */
  @Test
  public void testGetValueFromTimestampWithLocalTimeZoneLiteral()
  {
    RexLiteral literal = REX_BUILDER.makeTimestampWithLocalTimeZoneLiteral(
        new TimestampString("2021-04-01 16:54:31"),
        0
    );
    expectedException.expect(UnsupportedSQLQueryException.class);
    expectedException.expectMessage("TIMESTAMP_WITH_LOCAL_TIME_ZONE type is not supported");
    DruidLogicalValuesRule.getValueFromLiteral(literal, DEFAULT_CONTEXT);
  }

  /** TIME is rejected with a dedicated error. */
  @Test
  public void testGetValueFromTimeLiteral()
  {
    RexLiteral literal = REX_BUILDER.makeTimeLiteral(new TimeString("16:54:31"), 0);
    expectedException.expect(UnsupportedSQLQueryException.class);
    expectedException.expectMessage("TIME type is not supported");
    DruidLogicalValuesRule.getValueFromLiteral(literal, DEFAULT_CONTEXT);
  }

  /** TIME WITH LOCAL TIME ZONE is rejected with a dedicated error. */
  @Test
  public void testGetValueFromTimeWithLocalTimeZoneLiteral()
  {
    RexLiteral literal = REX_BUILDER.makeTimeWithLocalTimeZoneLiteral(new TimeString("16:54:31"), 0);
    expectedException.expect(UnsupportedSQLQueryException.class);
    expectedException.expectMessage("TIME_WITH_LOCAL_TIME_ZONE type is not supported");
    DruidLogicalValuesRule.getValueFromLiteral(literal, DEFAULT_CONTEXT);
  }

  /** An exact decimal literal typed INTEGER is extracted as a Long. */
  @Test
  public void testGetCastedValuesFromFloatToNumeric()
  {
    RexLiteral literal = REX_BUILDER.makeExactLiteral(
        new BigDecimal("123.0"),
        TYPE_FACTORY.createSqlType(SqlTypeName.INTEGER)
    );
    Object value = DruidLogicalValuesRule.getValueFromLiteral(literal, DEFAULT_CONTEXT);
    // FIX: JUnit's assertEquals takes (expected, actual); the arguments were
    // reversed, which yields a misleading message when the assertion fails.
    Assert.assertEquals(123L, value);
  }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math3.linear;
import org.apache.commons.math3.util.FastMath;
import org.apache.commons.math3.util.Precision;
/**
* Class transforming a general real matrix to Hessenberg form.
* <p>A m × m matrix A can be written as the product of three matrices: A = P
* × H × P<sup>T</sup> with P an orthogonal matrix and H a Hessenberg
* matrix. Both P and H are m × m matrices.</p>
* <p>Transformation to Hessenberg form is often not a goal by itself, but it is an
* intermediate step in more general decomposition algorithms like
* {@link EigenDecomposition eigen decomposition}. This class is therefore
* intended for internal use by the library and is not public. As a consequence
 * of this explicitly limited scope, many methods directly return references to
* internal arrays, not copies.</p>
* <p>This class is based on the method orthes in class EigenvalueDecomposition
* from the <a href="http://math.nist.gov/javanumerics/jama/">JAMA</a> library.</p>
*
* @see <a href="http://mathworld.wolfram.com/HessenbergDecomposition.html">MathWorld</a>
* @see <a href="http://en.wikipedia.org/wiki/Householder_transformation">Householder Transformations</a>
* @since 3.1
*/
class HessenbergTransformer {
/** Householder vectors. */
private final double householderVectors[][];
/** Temporary storage vector. */
private final double ort[];
/** Cached value of P. */
private RealMatrix cachedP;
/** Cached value of Pt. */
private RealMatrix cachedPt;
/** Cached value of H. */
private RealMatrix cachedH;
/**
* Build the transformation to Hessenberg form of a general matrix.
*
* <p>Note: the reduction happens in place on the array returned by
* {@code matrix.getData()}; per the {@code RealMatrix} contract this is a
* copy, so the caller's matrix is left untouched.</p>
*
* @param matrix matrix to transform
* @throws NonSquareMatrixException if the matrix is not square
*/
public HessenbergTransformer(final RealMatrix matrix) {
if (!matrix.isSquare()) {
throw new NonSquareMatrixException(matrix.getRowDimension(),
matrix.getColumnDimension());
}
final int m = matrix.getRowDimension();
householderVectors = matrix.getData();
ort = new double[m];
cachedP = null;
cachedPt = null;
cachedH = null;
// transform matrix
transform();
}
/**
* Returns the matrix P of the transform.
* <p>P is an orthogonal matrix, i.e. its inverse is also its transpose.</p>
*
* @return the P matrix
*/
public RealMatrix getP() {
if (cachedP == null) {
final int n = householderVectors.length;
final int high = n - 1;
final double[][] pa = new double[n][n];
// start from the identity matrix
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
pa[i][j] = (i == j) ? 1 : 0;
}
}
// accumulate the Householder reflectors stored by transform(), in
// reverse order; columns with a zero sub-diagonal entry were not
// reflected and are skipped
for (int m = high - 1; m >= 1; m--) {
if (householderVectors[m][m - 1] != 0.0) {
for (int i = m + 1; i <= high; i++) {
ort[i] = householderVectors[i][m - 1];
}
for (int j = m; j <= high; j++) {
double g = 0.0;
for (int i = m; i <= high; i++) {
g += ort[i] * pa[i][j];
}
// Double division avoids possible underflow
g = (g / ort[m]) / householderVectors[m][m - 1];
for (int i = m; i <= high; i++) {
pa[i][j] += g * ort[i];
}
}
}
}
cachedP = MatrixUtils.createRealMatrix(pa);
}
return cachedP;
}
/**
* Returns the transpose of the matrix P of the transform.
* <p>P is an orthogonal matrix, i.e. its inverse is also its transpose.</p>
*
* @return the transpose of the P matrix
*/
public RealMatrix getPT() {
if (cachedPt == null) {
cachedPt = getP().transpose();
}
// return the cached matrix
return cachedPt;
}
/**
* Returns the Hessenberg matrix H of the transform.
*
* @return the H matrix
*/
public RealMatrix getH() {
if (cachedH == null) {
final int m = householderVectors.length;
final double[][] h = new double[m][m];
for (int i = 0; i < m; ++i) {
if (i > 0) {
// copy the entry of the lower sub-diagonal
h[i][i - 1] = householderVectors[i][i - 1];
}
// copy upper triangular part of the matrix
// (entries below the sub-diagonal hold Householder vector data,
// not matrix entries, and are deliberately left at zero)
for (int j = i; j < m; ++j) {
h[i][j] = householderVectors[i][j];
}
}
cachedH = MatrixUtils.createRealMatrix(h);
}
// return the cached matrix
return cachedH;
}
/**
* Get the Householder vectors of the transform.
* <p>Note that since this class is only intended for internal use, it returns
* directly a reference to its internal arrays, not a copy.</p>
*
* @return the internal array holding the Hessenberg entries (upper part)
* and the Householder vector components (below the sub-diagonal)
*/
double[][] getHouseholderVectorsRef() {
return householderVectors;
}
/**
* Transform original matrix to Hessenberg form.
* <p>Transformation is done using Householder transforms.</p>
*/
private void transform() {
final int n = householderVectors.length;
final int high = n - 1;
for (int m = 1; m <= high - 1; m++) {
// Scale column.
// (scaling makes the later divisions well-conditioned; a zero scale
// means the column is already reduced and the step can be skipped)
double scale = 0;
for (int i = m; i <= high; i++) {
scale += FastMath.abs(householderVectors[i][m - 1]);
}
if (!Precision.equals(scale, 0)) {
// Compute Householder transformation.
double h = 0;
for (int i = high; i >= m; i--) {
ort[i] = householderVectors[i][m - 1] / scale;
h += ort[i] * ort[i];
}
// choose the sign of g opposite to ort[m] to avoid cancellation
final double g = (ort[m] > 0) ? -FastMath.sqrt(h) : FastMath.sqrt(h);
h -= ort[m] * g;
ort[m] -= g;
// Apply Householder similarity transformation
// H = (I - u*u' / h) * H * (I - u*u' / h)
for (int j = m; j < n; j++) {
double f = 0;
for (int i = high; i >= m; i--) {
f += ort[i] * householderVectors[i][j];
}
f /= h;
for (int i = m; i <= high; i++) {
householderVectors[i][j] -= f * ort[i];
}
}
for (int i = 0; i <= high; i++) {
double f = 0;
for (int j = high; j >= m; j--) {
f += ort[j] * householderVectors[i][j];
}
f /= h;
for (int j = m; j <= high; j++) {
householderVectors[i][j] -= f * ort[j];
}
}
// store the (scaled) Householder vector and sub-diagonal entry for
// later reconstruction of P and H
ort[m] = scale * ort[m];
householderVectors[m][m - 1] = scale * g;
}
}
}
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ext.mysql;
import org.jkiss.code.NotNull;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.ext.mysql.model.MySQLDataSource;
import org.jkiss.dbeaver.model.DBPDataSource;
import org.jkiss.dbeaver.model.DBPDataSourceContainer;
import org.jkiss.dbeaver.model.connection.DBPConnectionConfiguration;
import org.jkiss.dbeaver.model.connection.DBPDriver;
import org.jkiss.dbeaver.model.connection.DBPNativeClientLocation;
import org.jkiss.dbeaver.model.connection.DBPNativeClientLocationManager;
import org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSourceProvider;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.runtime.OSDescriptor;
import org.jkiss.dbeaver.runtime.DBWorkbench;
import org.jkiss.dbeaver.utils.GeneralUtils;
import org.jkiss.dbeaver.utils.WindowsRegistry;
import org.jkiss.utils.CommonUtils;
import org.jkiss.utils.IOUtils;
import org.jkiss.utils.StandardConstants;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.util.*;
/**
 * MySQL/MariaDB data source provider.
 * <p>
 * Builds JDBC URLs, opens {@link MySQLDataSource} instances, and implements
 * {@link DBPNativeClientLocationManager} by locating native client binaries on
 * the PATH and — on Windows — in the registry.
 */
public class MySQLDataSourceProvider extends JDBCDataSourceProvider implements DBPNativeClientLocationManager {
    private static final Log log = Log.getLog(MySQLDataSourceProvider.class);

    // Windows registry roots under which MySQL/MariaDB installers record server homes.
    private static final String REGISTRY_ROOT_MYSQL_32 = "SOFTWARE\\MySQL AB";
    private static final String REGISTRY_ROOT_MYSQL_64 = "SOFTWARE\\Wow6432Node\\MYSQL AB";
    private static final String REGISTRY_ROOT_MARIADB = "SOFTWARE\\Monty Program AB";
    // Registry value names holding the server install directory.
    private static final String SERVER_LOCATION_KEY = "Location";
    private static final String INSTALLDIR_KEY = "INSTALLDIR";
    //private static final String SERER_VERSION_KEY = "Version";

    // Lazily populated by findLocalClients(); keyed by server home path.
    private static Map<String, MySQLServerHome> localServers = null;
    // Default driver properties applied to every MySQL connection.
    private static final Map<String, String> connectionsProps;

    static {
        connectionsProps = new HashMap<>();
        // Prevent stupid errors "Cannot convert value '0000-00-00 00:00:00' from column X to TIMESTAMP"
        // Widely appears in MyISAM tables (joomla, etc)
        //connectionsProps.put("zeroDateTimeBehavior", "CONVERT_TO_NULL");
        // Set utf-8 as default charset
        connectionsProps.put("characterEncoding", GeneralUtils.UTF8_ENCODING);
        connectionsProps.put("tinyInt1isBit", "false");
        // Auth plugins
        // connectionsProps.put("authenticationPlugins",
        //     "com.mysql.jdbc.authentication.MysqlClearPasswordPlugin," +
        //     "com.mysql.jdbc.authentication.MysqlOldPasswordPlugin," +
        //     "org.jkiss.jdbc.mysql.auth.DialogAuthenticationPlugin");
    }

    /** @return the default driver connection properties (shared, do not mutate). */
    public static Map<String, String> getConnectionsProps() {
        return connectionsProps;
    }

    public MySQLDataSourceProvider()
    {
    }

    @Override
    protected String getConnectionPropertyDefaultValue(String name, String value) {
        // Our defaults take precedence over the generic JDBC defaults.
        String ovrValue = connectionsProps.get(name);
        return ovrValue != null ? ovrValue : super.getConnectionPropertyDefaultValue(name, value);
    }

    @Override
    public long getFeatures()
    {
        return FEATURE_SCHEMAS;
    }

    /**
     * Builds a jdbc:mysql:// URL from host, optional port and optional database.
     */
    @Override
    public String getConnectionURL(DBPDriver driver, DBPConnectionConfiguration connectionInfo)
    {
        /*
        String trustStorePath = System.getProperty(StandardConstants.ENV_USER_HOME) + "/.keystore";
        System.setProperty("javax.net.ssl.keyStore", trustStorePath);
        System.setProperty("javax.net.ssl.keyStorePassword", "changeit");
        System.setProperty("javax.net.ssl.trustStore", trustStorePath);
        System.setProperty("javax.net.ssl.trustStorePassword", "changeit");
        */
        StringBuilder url = new StringBuilder();
        url.append("jdbc:mysql://")
            .append(connectionInfo.getHostName());
        if (!CommonUtils.isEmpty(connectionInfo.getHostPort())) {
            url.append(":").append(connectionInfo.getHostPort());
        }
        url.append("/");
        if (!CommonUtils.isEmpty(connectionInfo.getDatabaseName())) {
            url.append(connectionInfo.getDatabaseName());
        }
        return url.toString();
    }

    @NotNull
    @Override
    public DBPDataSource openDataSource(
        @NotNull DBRProgressMonitor monitor, @NotNull DBPDataSourceContainer container)
        throws DBException
    {
        return new MySQLDataSource(monitor, container);
    }

    //////////////////////////////////////
    // Client manager

    @Override
    public List<DBPNativeClientLocation> findLocalClientLocations()
    {
        findLocalClients();
        return new ArrayList<>(localServers.values());
    }

    @Override
    public DBPNativeClientLocation getDefaultLocalClientLocation()
    {
        findLocalClients();
        // First discovered home wins (LinkedHashMap preserves discovery order).
        return localServers.isEmpty() ? null : localServers.values().iterator().next();
    }

    @Override
    public String getProductName(DBPNativeClientLocation location) throws DBException {
        return "MySQL/MariaDB";
    }

    @Override
    public String getProductVersion(DBPNativeClientLocation location) throws DBException {
        return getFullServerVersion(location.getPath());
    }

    /**
     * Returns the known server home for the given id, or a synthetic one if the
     * id was not discovered locally.
     */
    public static MySQLServerHome getServerHome(String homeId)
    {
        findLocalClients();
        MySQLServerHome home = localServers.get(homeId);
        return home == null ? new MySQLServerHome(homeId, homeId) : home;
    }

    /**
     * Discovers local MySQL/MariaDB installations (idempotent): scans PATH for
     * the console client binary and, on Windows, reads installer registry keys.
     */
    public synchronized static void findLocalClients()
    {
        if (localServers != null) {
            return;
        }
        localServers = new LinkedHashMap<>();
        // Scan PATH for a console client living in a ".../bin" directory.
        String path = System.getenv("PATH");
        if (path != null) {
            for (String token : path.split(System.getProperty(StandardConstants.ENV_PATH_SEPARATOR))) {
                token = CommonUtils.removeTrailingSlash(token);
                File mysqlFile = new File(token, MySQLUtils.getMySQLConsoleBinaryName());
                if (mysqlFile.exists()) {
                    File binFolder = mysqlFile.getAbsoluteFile().getParentFile();
                    if (binFolder.getName().equalsIgnoreCase("bin")) {
                        String homeId = CommonUtils.removeTrailingSlash(binFolder.getParentFile().getAbsolutePath());
                        localServers.put(homeId, new MySQLServerHome(homeId, null));
                    }
                }
            }
        }
        // Find homes recorded in the Windows registry.
        OSDescriptor localSystem = DBWorkbench.getPlatform().getLocalSystem();
        if (localSystem.isWindows()) {
            try {
                // MySQL entries
                final String registryRoot = localSystem.is64() ? REGISTRY_ROOT_MYSQL_64 : REGISTRY_ROOT_MYSQL_32;
                scanRegistryForServers(registryRoot, SERVER_LOCATION_KEY);
                // MariaDB entries
                scanRegistryForServers(REGISTRY_ROOT_MARIADB, INSTALLDIR_KEY);
            } catch (Throwable e) {
                // Registry access is best-effort; PATH results are kept regardless.
                log.warn("Error reading Windows registry", e);
            }
        }
    }

    /**
     * Registers every server home found below the given HKLM registry root:
     * each subkey carrying a value named {@code locationValueName} that points
     * at a directory containing "bin" becomes a local server entry.
     */
    private static void scanRegistryForServers(String registryRoot, String locationValueName) {
        List<String> homeKeys = WindowsRegistry.getInstance().readStringSubKeys(WindowsRegistry.HKEY_LOCAL_MACHINE, registryRoot);
        if (homeKeys == null) {
            return;
        }
        for (String homeKey : homeKeys) {
            Map<String, String> valuesMap = WindowsRegistry.getInstance().readStringValues(WindowsRegistry.HKEY_LOCAL_MACHINE, registryRoot + "\\" + homeKey);
            if (valuesMap == null) {
                continue;
            }
            for (String key : valuesMap.keySet()) {
                if (locationValueName.equalsIgnoreCase(key)) {
                    String serverPath = CommonUtils.removeTrailingSlash(valuesMap.get(key));
                    if (new File(serverPath, "bin").exists()) {
                        localServers.put(serverPath, new MySQLServerHome(serverPath, homeKey));
                    }
                }
            }
        }
    }

    /**
     * Runs "&lt;client&gt; -V" and extracts the version string following "Distrib ".
     *
     * @param path server home directory (or its "bin" subdirectory)
     * @return the version string, or null if it cannot be determined
     */
    static String getFullServerVersion(File path)
    {
        File binPath = path;
        File binSubfolder = new File(binPath, "bin");
        if (binSubfolder.exists()) {
            binPath = binSubfolder;
        }
        String cmd = new File(
            binPath,
            MySQLUtils.getMySQLConsoleBinaryName()).getAbsolutePath();
        try {
            Process p = Runtime.getRuntime().exec(new String[] {cmd, "-V"});
            try (BufferedReader input = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
                String line;
                while ((line = input.readLine()) != null) {
                    int pos = line.indexOf("Distrib ");
                    if (pos == -1) {
                        continue;
                    }
                    pos += 8;
                    // FIX: guard against output with no comma after the version;
                    // the original passed indexOf's -1 straight to substring and
                    // threw StringIndexOutOfBoundsException.
                    int pos2 = line.indexOf(",", pos);
                    return line.substring(pos, pos2 == -1 ? line.length() : pos2);
                }
            } finally {
                p.destroy();
            }
        }
        catch (Exception ex) {
            log.warn("Error reading MySQL server version from " + cmd, ex);
        }
        return null;
    }
}
| |
package org.apache.solr.schema;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import static org.apache.solr.rest.schema.TestBulkSchemaAPI.getSourceCopyFields;
import static org.apache.solr.rest.schema.TestBulkSchemaAPI.getObj;
import java.io.StringReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.common.util.Utils;
import org.apache.solr.util.RESTfulServerProvider;
import org.apache.solr.util.RestTestHarness;
import org.junit.BeforeClass;
import org.junit.Test;
import org.noggit.JSONParser;
import org.noggit.ObjectBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Concurrency stress test for the bulk schema API: several threads each issue
 * bulk add, replace and delete calls against a random node, then poll another
 * random node until the change becomes visible (or a timeout elapses).
 * Per-thread errors are collected and asserted at the end.
 */
public class TestBulkSchemaConcurrent extends AbstractFullDistribZkTestBase {
  static final Logger log = LoggerFactory.getLogger(TestBulkSchemaConcurrent.class);
  // One REST harness per cloud node; shared by all worker threads.
  private List<RestTestHarness> restTestHarnesses = new ArrayList<>();

  @BeforeClass
  public static void initSysProperties() {
    System.setProperty("managed.schema.mutable", "true");
    System.setProperty("enable.update.log", "true");
  }

  protected String getCloudSolrConfig() {
    return "solrconfig-managed-schema.xml";
  }

  /** Creates one REST harness per Solr client in the cloud cluster. */
  private void setupHarnesses() {
    for (final SolrClient client : clients) {
      RestTestHarness harness = new RestTestHarness(new RESTfulServerProvider() {
        @Override
        public String getBaseURL() {
          return ((HttpSolrClient)client).getBaseURL();
        }
      });
      restTestHarnesses.add(harness);
    }
  }

  @Override
  public void distribTearDown() throws Exception {
    super.distribTearDown();
    // Harnesses are shared by the worker threads and closed exactly once,
    // here, after every thread has finished.
    for (RestTestHarness r : restTestHarnesses) {
      r.close();
    }
  }

  @Test
  public void test() throws Exception {
    final int threadCount = 5;
    setupHarnesses();
    Thread[] threads = new Thread[threadCount];
    final List<List<String>> collectErrors = new ArrayList<>();
    for (int i = 0 ; i < threadCount ; i++) {
      final int finalI = i;
      threads[i] = new Thread(){
        @Override
        public void run() {
          ArrayList<String> errs = new ArrayList<>();
          collectErrors.add(errs);
          try {
            // Each thread exercises the full add -> replace -> delete cycle
            // on its own seed-suffixed schema entities.
            invokeBulkAddCall(finalI, errs);
            invokeBulkReplaceCall(finalI, errs);
            invokeBulkDeleteCall(finalI, errs);
          } catch (Exception e) {
            e.printStackTrace();
          }
        }
      };
      threads[i].start();
    }
    for (Thread thread : threads) thread.join();
    boolean success = true;
    for (List<String> e : collectErrors) {
      if (e != null && !e.isEmpty()) {
        success = false;
        log.error(e.toString());
      }
    }
    assertTrue(collectErrors.toString(), success);
  }

  /**
   * Adds a field, dynamic field, copy field and field type (all suffixed with
   * {@code seed}) via one bulk call, then polls another node until all four
   * are visible; accumulated error messages go into {@code errs}.
   */
  private void invokeBulkAddCall(int seed, ArrayList<String> errs) throws Exception {
    String payload = "{\n" +
        " 'add-field' : {\n" +
        " 'name':'replaceFieldA',\n" +
        " 'type': 'string',\n" +
        " 'stored':true,\n" +
        " 'indexed':false\n" +
        " },\n" +
        " 'add-dynamic-field' : {\n" +
        " 'name' :'replaceDynamicField',\n" +
        " 'type':'string',\n" +
        " 'stored':true,\n" +
        " 'indexed':true\n" +
        " },\n" +
        " 'add-copy-field' : {\n" +
        " 'source' :'replaceFieldA',\n" +
        " 'dest':['replaceDynamicCopyFieldDest']\n" +
        " },\n" +
        " 'add-field-type' : {\n" +
        " 'name' :'myNewFieldTypeName',\n" +
        " 'class' : 'solr.StrField',\n" +
        " 'sortMissingLast':'true'\n" +
        " }\n" +
        " }";
    String aField = "a" + seed;
    String dynamicFldName = "*_lol" + seed;
    String dynamicCopyFldDest = "hello_lol"+seed;
    String newFieldTypeName = "mystr" + seed;
    payload = payload.replace("replaceFieldA", aField);
    payload = payload.replace("replaceDynamicField", dynamicFldName);
    payload = payload.replace("replaceDynamicCopyFieldDest", dynamicCopyFldDest);
    payload = payload.replace("myNewFieldTypeName", newFieldTypeName);
    RestTestHarness publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size()));
    String response = publisher.post("/schema?wt=json", SolrTestCaseJ4.json(payload));
    Map map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    Object errors = map.get("errors");
    if (errors != null) {
      errs.add(new String(Utils.toJSON(errors), StandardCharsets.UTF_8));
      return;
    }
    // Poll another node until the additions are visible there too.
    // FIX: do NOT close this harness here — it comes from the shared
    // restTestHarnesses list, may be in use by other threads, and is closed
    // again in distribTearDown() (use-after-close + double close).
    Set<String> errmessages = new HashSet<>();
    RestTestHarness harness = restTestHarnesses.get(r.nextInt(restTestHarnesses.size()));
    long startTime = System.nanoTime();
    long maxTimeoutMillis = 100000;
    while (TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutMillis) {
      errmessages.clear();
      Map m = getObj(harness, aField, "fields");
      if (m == null) errmessages.add(StrUtils.formatString("field {0} not created", aField));
      m = getObj(harness, dynamicFldName, "dynamicFields");
      if (m == null) errmessages.add(StrUtils.formatString("dynamic field {0} not created", dynamicFldName));
      List l = getSourceCopyFields(harness, aField);
      if (!checkCopyField(l, aField, dynamicCopyFldDest))
        errmessages.add(StrUtils.formatString
            ("CopyField source={0},dest={1} not created", aField, dynamicCopyFldDest));
      m = getObj(harness, newFieldTypeName, "fieldTypes");
      if (m == null) errmessages.add(StrUtils.formatString("new type {0} not created", newFieldTypeName));
      if (errmessages.isEmpty()) break;
      Thread.sleep(10);
    }
    if (!errmessages.isEmpty()) {
      errs.addAll(errmessages);
    }
  }

  /**
   * Replaces the field, dynamic field and field type created by
   * {@link #invokeBulkAddCall}, then polls another node until the entities are
   * still present (i.e. the replace did not drop them).
   */
  private void invokeBulkReplaceCall(int seed, ArrayList<String> errs) throws Exception {
    String payload = "{\n" +
        " 'replace-field' : {\n" +
        " 'name':'replaceFieldA',\n" +
        " 'type': 'text',\n" +
        " 'stored':true,\n" +
        " 'indexed':true\n" +
        " },\n" +
        " 'replace-dynamic-field' : {\n" +
        " 'name' :'replaceDynamicField',\n" +
        " 'type':'text',\n" +
        " 'stored':true,\n" +
        " 'indexed':true\n" +
        " },\n" +
        " 'replace-field-type' : {\n" +
        " 'name' :'myNewFieldTypeName',\n" +
        " 'class' : 'solr.TextField'\n" +
        " }\n" +
        " }";
    String aField = "a" + seed;
    String dynamicFldName = "*_lol" + seed;
    String dynamicCopyFldDest = "hello_lol"+seed;
    String newFieldTypeName = "mystr" + seed;
    payload = payload.replace("replaceFieldA", aField);
    payload = payload.replace("replaceDynamicField", dynamicFldName);
    payload = payload.replace("myNewFieldTypeName", newFieldTypeName);
    RestTestHarness publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size()));
    String response = publisher.post("/schema?wt=json", SolrTestCaseJ4.json(payload));
    Map map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    Object errors = map.get("errors");
    if (errors != null) {
      errs.add(new String(Utils.toJSON(errors), StandardCharsets.UTF_8));
      return;
    }
    // Poll another node; same shared-harness rule as in invokeBulkAddCall —
    // the harness must not be closed here.
    Set<String> errmessages = new HashSet<>();
    RestTestHarness harness = restTestHarnesses.get(r.nextInt(restTestHarnesses.size()));
    long startTime = System.nanoTime();
    long maxTimeoutMillis = 100000;
    while (TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutMillis) {
      errmessages.clear();
      Map m = getObj(harness, aField, "fields");
      if (m == null) errmessages.add(StrUtils.formatString("field {0} no longer present", aField));
      m = getObj(harness, dynamicFldName, "dynamicFields");
      if (m == null) errmessages.add(StrUtils.formatString("dynamic field {0} no longer present", dynamicFldName));
      List l = getSourceCopyFields(harness, aField);
      if (!checkCopyField(l, aField, dynamicCopyFldDest))
        errmessages.add(StrUtils.formatString("CopyField source={0},dest={1} no longer present", aField, dynamicCopyFldDest));
      m = getObj(harness, newFieldTypeName, "fieldTypes");
      if (m == null) errmessages.add(StrUtils.formatString("new type {0} no longer present", newFieldTypeName));
      if (errmessages.isEmpty()) break;
      Thread.sleep(10);
    }
    if (!errmessages.isEmpty()) {
      errs.addAll(errmessages);
    }
  }

  /**
   * Deletes the copy field, field, dynamic field and field type for this seed,
   * then polls another node until none of them is visible any more.
   */
  private void invokeBulkDeleteCall(int seed, ArrayList<String> errs) throws Exception {
    String payload = "{\n" +
        " 'delete-copy-field' : {\n" +
        " 'source' :'replaceFieldA',\n" +
        " 'dest':['replaceDynamicCopyFieldDest']\n" +
        " },\n" +
        " 'delete-field' : {'name':'replaceFieldA'},\n" +
        " 'delete-dynamic-field' : {'name' :'replaceDynamicField'},\n" +
        " 'delete-field-type' : {'name' :'myNewFieldTypeName'}\n" +
        " }";
    String aField = "a" + seed;
    String dynamicFldName = "*_lol" + seed;
    String dynamicCopyFldDest = "hello_lol"+seed;
    String newFieldTypeName = "mystr" + seed;
    payload = payload.replace("replaceFieldA", aField);
    payload = payload.replace("replaceDynamicField", dynamicFldName);
    payload = payload.replace("replaceDynamicCopyFieldDest",dynamicCopyFldDest);
    payload = payload.replace("myNewFieldTypeName", newFieldTypeName);
    RestTestHarness publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size()));
    String response = publisher.post("/schema?wt=json", SolrTestCaseJ4.json(payload));
    Map map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    Object errors = map.get("errors");
    if (errors != null) {
      errs.add(new String(Utils.toJSON(errors), StandardCharsets.UTF_8));
      return;
    }
    // Poll another node; same shared-harness rule as in invokeBulkAddCall —
    // the harness must not be closed here.
    Set<String> errmessages = new HashSet<>();
    RestTestHarness harness = restTestHarnesses.get(r.nextInt(restTestHarnesses.size()));
    long startTime = System.nanoTime();
    long maxTimeoutMillis = 100000;
    while (TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutMillis) {
      errmessages.clear();
      Map m = getObj(harness, aField, "fields");
      if (m != null) errmessages.add(StrUtils.formatString("field {0} still exists", aField));
      m = getObj(harness, dynamicFldName, "dynamicFields");
      if (m != null) errmessages.add(StrUtils.formatString("dynamic field {0} still exists", dynamicFldName));
      List l = getSourceCopyFields(harness, aField);
      if (checkCopyField(l, aField, dynamicCopyFldDest))
        errmessages.add(StrUtils.formatString("CopyField source={0},dest={1} still exists", aField, dynamicCopyFldDest));
      m = getObj(harness, newFieldTypeName, "fieldTypes");
      if (m != null) errmessages.add(StrUtils.formatString("new type {0} still exists", newFieldTypeName));
      if (errmessages.isEmpty()) break;
      Thread.sleep(10);
    }
    if (!errmessages.isEmpty()) {
      errs.addAll(errmessages);
    }
  }

  /** @return true if the copy-field list contains an entry with the given source and dest. */
  private boolean checkCopyField(List<Map> l, String src, String dest) {
    if (l == null) return false;
    for (Map map : l) {
      if (src.equals(map.get("source")) && dest.equals(map.get("dest")))
        return true;
    }
    return false;
  }
}
| |
/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
*
* (C) Copyright IBM Corp. 1999 All Rights Reserved.
* Copyright 1997 The Open Group Research Institute. All rights reserved.
*/
package sun.security.krb5.internal;
import sun.security.krb5.*;
import sun.security.util.*;
import java.util.Vector;
import java.io.IOException;
import java.math.BigInteger;
/**
* Implements the ASN.1 KDC-REQ-BODY type.
*
* <xmp>
* KDC-REQ-BODY ::= SEQUENCE {
* kdc-options [0] KDCOptions,
* cname [1] PrincipalName OPTIONAL
* -- Used only in AS-REQ --,
* realm [2] Realm
* -- Server's realm
* -- Also client's in AS-REQ --,
* sname [3] PrincipalName OPTIONAL,
* from [4] KerberosTime OPTIONAL,
* till [5] KerberosTime,
* rtime [6] KerberosTime OPTIONAL,
* nonce [7] UInt32,
* etype [8] SEQUENCE OF Int32 -- EncryptionType
* -- in preference order --,
* addresses [9] HostAddresses OPTIONAL,
* enc-authorization-data [10] EncryptedData OPTIONAL
* -- AuthorizationData --,
* additional-tickets [11] SEQUENCE OF Ticket OPTIONAL
* -- NOTE: not empty
* }
* </xmp>
*
* <p>
* This definition reflects the Network Working Group RFC 4120
* specification available at
* <a href="http://www.ietf.org/rfc/rfc4120.txt">
* http://www.ietf.org/rfc/rfc4120.txt</a>.
*/
public class KDCReqBody {
    public KDCOptions kdcOptions;
    public PrincipalName cname; // optional, used in AS-REQ only
    public PrincipalName sname; // optional
    public KerberosTime from; // optional
    public KerberosTime till;
    public KerberosTime rtime; // optional
    public HostAddresses addresses; // optional
    private int nonce;
    private int[] eType = null; // a sequence; not optional
    private EncryptedData encAuthorizationData; // optional
    private Ticket[] additionalTickets; // optional

    /**
     * Constructs a KDCReqBody from its component values.
     * The mutable array arguments ({@code new_eType}, {@code new_additionalTickets})
     * are defensively copied so later caller mutation cannot affect this object.
     *
     * @exception IOException if any element of {@code new_additionalTickets} is null.
     */
    public KDCReqBody(
        KDCOptions new_kdcOptions,
        PrincipalName new_cname, // optional in ASReq only
        PrincipalName new_sname, // optional
        KerberosTime new_from, // optional
        KerberosTime new_till,
        KerberosTime new_rtime, // optional
        int new_nonce,
        int[] new_eType, // a sequence; not optional
        HostAddresses new_addresses, // optional
        EncryptedData new_encAuthorizationData, // optional
        Ticket[] new_additionalTickets // optional
    ) throws IOException {
        kdcOptions = new_kdcOptions;
        cname = new_cname;
        sname = new_sname;
        from = new_from;
        till = new_till;
        rtime = new_rtime;
        nonce = new_nonce;
        if (new_eType != null) {
            // defensive copy of the encryption-type preference list
            eType = new_eType.clone();
        }
        addresses = new_addresses;
        encAuthorizationData = new_encAuthorizationData;
        if (new_additionalTickets != null) {
            additionalTickets = new Ticket[new_additionalTickets.length];
            for (int i = 0; i < new_additionalTickets.length; i++) {
                if (new_additionalTickets[i] == null) {
                    // null entries are rejected outright rather than silently skipped
                    throw new IOException("Cannot create a KDCReqBody");
                } else {
                    additionalTickets[i] = (Ticket)new_additionalTickets[i].clone();
                }
            }
        }
    }

    /**
     * Constructs a KDCReqBody object by decoding DER data.
     * Fields are consumed strictly in the tag order [0]..[11] defined by the
     * KDC-REQ-BODY production; optional trailing fields are only read while
     * data remains in the sequence.
     *
     * @param encoding a DER-encoded data.
     * @param msgType an int indicating whether it's KRB_AS_REQ or KRB_TGS_REQ type.
     * @exception Asn1Exception if an error occurs while decoding an ASN1 encoded data.
     * @exception IOException if an I/O error occurs while reading encoded data.
     * @exception RealmException if an error occurs while constructing a Realm object from the encoded data.
     *
     */
    public KDCReqBody(DerValue encoding, int msgType)
        throws Asn1Exception, RealmException, KrbException, IOException {
        DerValue der, subDer;
        addresses = null;
        encAuthorizationData = null;
        additionalTickets = null;
        if (encoding.getTag() != DerValue.tag_Sequence) {
            throw new Asn1Exception(Krb5.ASN1_BAD_ID);
        }
        kdcOptions = KDCOptions.parse(encoding.getData(), (byte)0x00, false);
        // cname only appears in AS-REQ and it shares the realm field with
        // sname. This is the only place where realm comes after the name.
        // We first give cname a fake realm and reassign it the correct
        // realm after the realm field is read.
        cname = PrincipalName.parse(encoding.getData(), (byte)0x01, true,
            new Realm("PLACEHOLDER"));
        if ((msgType != Krb5.KRB_AS_REQ) && (cname != null)) {
            // cname is only legal inside an AS-REQ body
            throw new Asn1Exception(Krb5.ASN1_BAD_ID);
        }
        Realm realm = Realm.parse(encoding.getData(), (byte)0x02, false);
        if (cname != null) {
            // rebuild cname with the realm actually carried by field [2]
            cname = new PrincipalName(
                cname.getNameType(), cname.getNameStrings(), realm);
        }
        sname = PrincipalName.parse(encoding.getData(), (byte)0x03, true, realm);
        from = KerberosTime.parse(encoding.getData(), (byte)0x04, true);
        till = KerberosTime.parse(encoding.getData(), (byte)0x05, false);
        rtime = KerberosTime.parse(encoding.getData(), (byte)0x06, true);
        // nonce [7] is mandatory: reject anything else at this position
        der = encoding.getData().getDerValue();
        if ((der.getTag() & (byte)0x1F) == (byte)0x07) {
            nonce = der.getData().getBigInteger().intValue();
        } else {
            throw new Asn1Exception(Krb5.ASN1_BAD_ID);
        }
        // etype [8] is a mandatory SEQUENCE OF Int32, collected into eType[]
        der = encoding.getData().getDerValue();
        Vector<Integer> v = new Vector<>();
        if ((der.getTag() & (byte)0x1F) == (byte)0x08) {
            subDer = der.getData().getDerValue();
            if (subDer.getTag() == DerValue.tag_SequenceOf) {
                while(subDer.getData().available() > 0) {
                    v.addElement(subDer.getData().getBigInteger().intValue());
                }
                eType = new int[v.size()];
                for (int i = 0; i < v.size(); i++) {
                    eType[i] = v.elementAt(i);
                }
            } else {
                throw new Asn1Exception(Krb5.ASN1_BAD_ID);
            }
        } else {
            throw new Asn1Exception(Krb5.ASN1_BAD_ID);
        }
        // remaining fields [9]..[11] are optional; only parse while data remains
        if (encoding.getData().available() > 0) {
            addresses = HostAddresses.parse(encoding.getData(), (byte)0x09, true);
        }
        if (encoding.getData().available() > 0) {
            encAuthorizationData = EncryptedData.parse(encoding.getData(), (byte)0x0A, true);
        }
        if (encoding.getData().available() > 0) {
            Vector<Ticket> tempTickets = new Vector<>();
            der = encoding.getData().getDerValue();
            if ((der.getTag() & (byte)0x1F) == (byte)0x0B) {
                subDer = der.getData().getDerValue();
                if (subDer.getTag() == DerValue.tag_SequenceOf) {
                    while (subDer.getData().available() > 0) {
                        tempTickets.addElement(new Ticket(subDer.getData().getDerValue()));
                    }
                } else {
                    throw new Asn1Exception(Krb5.ASN1_BAD_ID);
                }
                // NOTE: an empty ticket sequence leaves additionalTickets null
                if (tempTickets.size() > 0) {
                    additionalTickets = new Ticket[tempTickets.size()];
                    tempTickets.copyInto(additionalTickets);
                }
            } else {
                throw new Asn1Exception(Krb5.ASN1_BAD_ID);
            }
        }
        if (encoding.getData().available() > 0) {
            // trailing bytes after the last recognized field are an encoding error
            throw new Asn1Exception(Krb5.ASN1_BAD_ID);
        }
    }

    /**
     * Encodes this object to an OutputStream.
     * Fields are emitted in tag order; cname/realm placement depends on
     * msgType (cname is only emitted for KRB_AS_REQ, and the realm [2] comes
     * from sname when present, otherwise from cname).
     *
     * @return an byte array of encoded data.
     * @exception Asn1Exception if an error occurs while decoding an ASN1 encoded data.
     * @exception IOException if an I/O error occurs while reading encoded data.
     *
     */
    public byte[] asn1Encode(int msgType) throws Asn1Exception, IOException {
        Vector<DerValue> v = new Vector<>();
        v.addElement(new DerValue(DerValue.createTag(DerValue.TAG_CONTEXT, true, (byte)0x00), kdcOptions.asn1Encode()));
        if (msgType == Krb5.KRB_AS_REQ) {
            if (cname != null) {
                v.addElement(new DerValue(DerValue.createTag(DerValue.TAG_CONTEXT, true, (byte)0x01), cname.asn1Encode()));
            }
        }
        if (sname != null) {
            v.addElement(new DerValue(DerValue.createTag(DerValue.TAG_CONTEXT, true, (byte)0x02), sname.getRealm().asn1Encode()));
            v.addElement(new DerValue(DerValue.createTag(DerValue.TAG_CONTEXT, true, (byte)0x03), sname.asn1Encode()));
        } else if (cname != null) {
            v.addElement(new DerValue(DerValue.createTag(DerValue.TAG_CONTEXT, true, (byte)0x02), cname.getRealm().asn1Encode()));
        }
        if (from != null) {
            v.addElement(new DerValue(DerValue.createTag(DerValue.TAG_CONTEXT, true, (byte)0x04), from.asn1Encode()));
        }
        v.addElement(new DerValue(DerValue.createTag(DerValue.TAG_CONTEXT, true, (byte)0x05), till.asn1Encode()));
        if (rtime != null) {
            v.addElement(new DerValue(DerValue.createTag(DerValue.TAG_CONTEXT, true, (byte)0x06), rtime.asn1Encode()));
        }
        DerOutputStream temp = new DerOutputStream();
        temp.putInteger(BigInteger.valueOf(nonce));
        v.addElement(new DerValue(DerValue.createTag(DerValue.TAG_CONTEXT, true, (byte)0x07), temp.toByteArray()));
        //revisit, if empty eType sequences are allowed
        temp = new DerOutputStream();
        for (int i = 0; i < eType.length; i++) {
            temp.putInteger(BigInteger.valueOf(eType[i]));
        }
        DerOutputStream eTypetemp = new DerOutputStream();
        eTypetemp.write(DerValue.tag_SequenceOf, temp);
        v.addElement(new DerValue(DerValue.createTag(DerValue.TAG_CONTEXT, true, (byte)0x08), eTypetemp.toByteArray()));
        if (addresses != null) {
            v.addElement(new DerValue(DerValue.createTag(DerValue.TAG_CONTEXT, true, (byte)0x09), addresses.asn1Encode()));
        }
        if (encAuthorizationData != null) {
            v.addElement(new DerValue(DerValue.createTag(DerValue.TAG_CONTEXT, true, (byte)0x0A), encAuthorizationData.asn1Encode()));
        }
        if (additionalTickets != null && additionalTickets.length > 0) {
            temp = new DerOutputStream();
            for (int i = 0; i < additionalTickets.length; i++) {
                temp.write(additionalTickets[i].asn1Encode());
            }
            DerOutputStream ticketsTemp = new DerOutputStream();
            ticketsTemp.write(DerValue.tag_SequenceOf, temp);
            v.addElement(new DerValue(DerValue.createTag(DerValue.TAG_CONTEXT, true, (byte)0x0B), ticketsTemp.toByteArray()));
        }
        // wrap all collected context-tagged fields into the outer SEQUENCE
        DerValue der[] = new DerValue[v.size()];
        v.copyInto(der);
        temp = new DerOutputStream();
        temp.putSequence(der);
        return temp.toByteArray();
    }

    /** Returns the nonce value carried by this request body. */
    public int getNonce() {
        return nonce;
    }
}
| |
/*
* Copyright (c) 2007 Mockito contributors
* This program is made available under the terms of the MIT License.
*/
package org.mockito.internal.util.reflection;
import org.mockito.exceptions.base.MockitoException;
import org.mockito.internal.util.MockUtil;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
* Initialize a field with type instance if a default constructor can be found.
*
* <p>
* If the given field is already initialized, then <strong>the actual instance is returned</strong>.
* This initializer doesn't work with inner classes, local classes, interfaces or abstract types.
* </p>
*
*/
public class FieldInitializer {

    private final Object fieldOwner;
    private final Field field;
    private final ConstructorInstantiator instantiator;

    /**
     * Prepare initializer with the given field on the given instance.
     *
     * <p>
     * This constructor fails fast if the field type cannot be handled.
     * </p>
     *
     * @param fieldOwner Instance of the test.
     * @param field Field to be initialized.
     */
    public FieldInitializer(Object fieldOwner, Field field) {
        this(fieldOwner, field, new NoArgConstructorInstantiator(fieldOwner, field));
    }

    /**
     * Prepare initializer with the given field on the given instance.
     *
     * <p>
     * This constructor fails fast if the field type cannot be handled.
     * </p>
     *
     * @param fieldOwner Instance of the test.
     * @param field Field to be initialized.
     * @param argResolver Constructor parameters resolver
     */
    public FieldInitializer(Object fieldOwner, Field field, ConstructorArgumentResolver argResolver) {
        this(fieldOwner, field, new ParameterizedConstructorInstantiator(fieldOwner, field, argResolver));
    }

    private FieldInitializer(Object fieldOwner, Field field, ConstructorInstantiator instantiator) {
        // Validate the field type only when instantiation may actually happen;
        // an already-initialized field is returned as-is by initialize().
        if(new FieldReader(fieldOwner, field).isNull()) {
            checkNotLocal(field);
            checkNotInner(field);
            checkNotInterface(field);
            checkNotAbstract(field);
        }
        this.fieldOwner = fieldOwner;
        this.field = field;
        this.instantiator = instantiator;
    }

    /**
     * Initialize field if not initialized and return the actual instance.
     *
     * @return Actual field instance.
     */
    public FieldInitializationReport initialize() {
        final AccessibilityChanger changer = new AccessibilityChanger();
        changer.enableAccess(field);
        try {
            return acquireFieldInstance();
        } catch(IllegalAccessException e) {
            throw new MockitoException("Problems initializing field '" + field.getName() + "' of type '" + field.getType().getSimpleName() + "'", e);
        } finally {
            // always restore the field's original accessibility
            changer.safelyDisableAccess(field);
        }
    }

    /** Rejects local classes; this initializer cannot create them. */
    private void checkNotLocal(Field field) {
        if(field.getType().isLocalClass()) {
            throw new MockitoException("the type '" + field.getType().getSimpleName() + "' is a local class.");
        }
    }

    /** Rejects non-static member (inner) classes; they require an enclosing instance. */
    private void checkNotInner(Field field) {
        if(field.getType().isMemberClass() && !Modifier.isStatic(field.getType().getModifiers())) {
            throw new MockitoException("the type '" + field.getType().getSimpleName() + "' is an inner class.");
        }
    }

    /** Rejects interfaces; they cannot be instantiated. */
    private void checkNotInterface(Field field) {
        if(field.getType().isInterface()) {
            throw new MockitoException("the type '" + field.getType().getSimpleName() + "' is an interface.");
        }
    }

    /** Rejects abstract classes; they cannot be instantiated. */
    private void checkNotAbstract(Field field) {
        if(Modifier.isAbstract(field.getType().getModifiers())) {
            // Bug fix: the closing quote after the type name was missing,
            // producing an unbalanced message like "the type 'Foo is an abstract class."
            throw new MockitoException("the type '" + field.getType().getSimpleName() + "' is an abstract class.");
        }
    }

    /**
     * Returns the current field value when already set, otherwise delegates
     * to the configured instantiation strategy.
     */
    private FieldInitializationReport acquireFieldInstance() throws IllegalAccessException {
        Object fieldInstance = field.get(fieldOwner);
        if(fieldInstance != null) {
            return new FieldInitializationReport(fieldInstance, false, false);
        }
        return instantiator.instantiate();
    }

    /**
     * Represents the strategy used to resolve actual instances
     * to be given to a constructor given the argument types.
     */
    public interface ConstructorArgumentResolver {

        /**
         * Try to resolve instances from types.
         *
         * <p>
         * Checks on the real argument type or on the correct argument number
         * will happen during the field initialization {@link FieldInitializer#initialize()}.
         * I.e the only responsibility of this method, is to provide instances <strong>if possible</strong>.
         * </p>
         *
         * @param argTypes Constructor argument types, should not be null.
         * @return The argument instances to be given to the constructor, should not be null.
         */
        Object[] resolveTypeInstances(Class<?>... argTypes);
    }

    /** Internal strategy interface: produce the initialized-field report. */
    private interface ConstructorInstantiator {
        FieldInitializationReport instantiate();
    }

    /**
     * Constructor instantiating strategy for no-arg constructor.
     *
     * <p>
     * If a no-arg constructor can be found then the instance is created using
     * this constructor.
     * Otherwise a technical MockitoException is thrown.
     * </p>
     */
    static class NoArgConstructorInstantiator implements ConstructorInstantiator {
        private final Object testClass;
        private final Field field;

        /**
         * Internal, checks are done by FieldInitializer.
         * Fields are assumed to be accessible.
         */
        NoArgConstructorInstantiator(Object testClass, Field field) {
            this.testClass = testClass;
            this.field = field;
        }

        public FieldInitializationReport instantiate() {
            final AccessibilityChanger changer = new AccessibilityChanger();
            Constructor<?> constructor = null;
            try {
                constructor = field.getType().getDeclaredConstructor();
                changer.enableAccess(constructor);

                final Object[] noArg = new Object[0];
                Object newFieldInstance = constructor.newInstance(noArg);
                new FieldSetter(testClass, field).set(newFieldInstance);
                // report the value now stored in the field (instantiated via no-arg ctor)
                return new FieldInitializationReport(field.get(testClass), true, false);
            } catch (NoSuchMethodException e) {
                throw new MockitoException("the type '" + field.getType().getSimpleName() + "' has no default constructor", e);
            } catch (InvocationTargetException e) {
                throw new MockitoException("the default constructor of type '" + field.getType().getSimpleName() + "' has raised an exception (see the stack trace for cause): " + e.getTargetException().toString(), e);
            } catch (InstantiationException e) {
                throw new MockitoException("InstantiationException (see the stack trace for cause): " + e.toString(), e);
            } catch (IllegalAccessException e) {
                throw new MockitoException("IllegalAccessException (see the stack trace for cause): " + e.toString(), e);
            } finally {
                if(constructor != null) {
                    changer.safelyDisableAccess(constructor);
                }
            }
        }
    }

    /**
     * Constructor instantiating strategy for parameterized constructors.
     *
     * <p>
     * Choose the constructor with the highest number of parameters, then
     * call the ConstructorArgResolver to get actual argument instances.
     * If the argResolver fails, then a technical MockitoException is thrown.
     * Otherwise the instance is created with the resolved arguments.
     * </p>
     */
    static class ParameterizedConstructorInstantiator implements ConstructorInstantiator {
        private final Object testClass;
        private final Field field;
        private final ConstructorArgumentResolver argResolver;
        private final MockUtil mockUtil = new MockUtil();
        // Orders constructors by descending parameter count; ties are broken by
        // preferring the constructor with more mockable parameter types.
        private final Comparator<Constructor<?>> byParameterNumber = new Comparator<Constructor<?>>() {
            public int compare(Constructor<?> constructorA, Constructor<?> constructorB) {
                int argLengths = constructorB.getParameterTypes().length - constructorA.getParameterTypes().length;
                if (argLengths == 0) {
                    int constructorAMockableParamsSize = countMockableParams(constructorA);
                    int constructorBMockableParamsSize = countMockableParams(constructorB);
                    return constructorBMockableParamsSize - constructorAMockableParamsSize;
                }
                return argLengths;
            }

            /** Counts how many of the constructor's parameter types Mockito can mock. */
            private int countMockableParams(Constructor<?> constructor) {
                int constructorMockableParamsSize = 0;
                for (Class<?> aClass : constructor.getParameterTypes()) {
                    if(mockUtil.isTypeMockable(aClass)){
                        constructorMockableParamsSize++;
                    }
                }
                return constructorMockableParamsSize;
            }
        };

        /**
         * Internal, checks are done by FieldInitializer.
         * Fields are assumed to be accessible.
         */
        ParameterizedConstructorInstantiator(Object testClass, Field field, ConstructorArgumentResolver argumentResolver) {
            this.testClass = testClass;
            this.field = field;
            this.argResolver = argumentResolver;
        }

        public FieldInitializationReport instantiate() {
            final AccessibilityChanger changer = new AccessibilityChanger();
            Constructor<?> constructor = null;
            try {
                constructor = biggestConstructor(field.getType());
                changer.enableAccess(constructor);

                final Object[] args = argResolver.resolveTypeInstances(constructor.getParameterTypes());
                Object newFieldInstance = constructor.newInstance(args);
                new FieldSetter(testClass, field).set(newFieldInstance);
                // report the value now stored in the field (instantiated via parameterized ctor)
                return new FieldInitializationReport(field.get(testClass), false, true);
            } catch (IllegalArgumentException e) {
                throw new MockitoException("internal error : argResolver provided incorrect types for constructor " + constructor + " of type " + field.getType().getSimpleName(), e);
            } catch (InvocationTargetException e) {
                throw new MockitoException("the constructor of type '" + field.getType().getSimpleName() + "' has raised an exception (see the stack trace for cause): " + e.getTargetException().toString(), e);
            } catch (InstantiationException e) {
                throw new MockitoException("InstantiationException (see the stack trace for cause): " + e.toString(), e);
            } catch (IllegalAccessException e) {
                throw new MockitoException("IllegalAccessException (see the stack trace for cause): " + e.toString(), e);
            } finally {
                if(constructor != null) {
                    changer.safelyDisableAccess(constructor);
                }
            }
        }

        /** Fails when the chosen constructor takes no parameters at all. */
        private void checkParameterized(Constructor<?> constructor, Field field) {
            if(constructor.getParameterTypes().length == 0) {
                throw new MockitoException("the field " + field.getName() + " of type " + field.getType() + " has no parameterized constructor");
            }
        }

        /** Picks the declared constructor ranked first by {@link #byParameterNumber}. */
        private Constructor<?> biggestConstructor(Class<?> clazz) {
            final List<Constructor<?>> constructors = Arrays.asList(clazz.getDeclaredConstructors());
            Collections.sort(constructors, byParameterNumber);
            Constructor<?> constructor = constructors.get(0);
            checkParameterized(constructor, field);
            return constructor;
        }
    }
}
| |
package uk.ac.ebi.spot.ols.util;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import org.semanticweb.owlapi.model.IRI;
import uk.ac.ebi.spot.ols.exception.OntologyLoadingException;
import uk.ac.ebi.spot.ols.loader.OntologyLoader;
import java.io.*;
import java.util.Collection;
/**
* Created by catherineleroy on 16/03/2015.<br>
*<br>
 * This class contains the utility method to create, for a given term, a bbop json graph describing the tree with all the siblings,
* parents, parent's sibling of this term.<br>
* To find out more about bbop graph :<br>
* https://github.com/berkeleybop/bbop-js/wiki/Graph<br>
*<br>
 * This is an example of a bbop graph: <br>
* <br>
* {<br>
* "nodes": [<br>
* {<br>
* "id": "GO:0043474",<br>
* "lbl": "pigment metabolic process involved in pigmentation"<br>
* },<br>
* {<br>
* "id": "GO:0043475",<br>
* "lbl": "pigment metabolic process involved in pigment accumulation"<br>
* }<br>
* ],<br>
* "edges": [<br>
* {<br>
* "sub": "GO:0043475",<br>
* "obj": "GO:0043474",<br>
* "pred": "is_a"<br>
* }<br>
* ]<br>
* }<br>
*<br>
*/
public class SiblingGraphCreator {

    /**
     * Build the bbop graph containing all the parents, siblings and sibling
     * parents of the given term and flush it to the given OutputStream.
     *
     * @param loader an ontology loader for the ontology you're interested in
     * @param classTerm the IRI of the term for which you want to build the sibling graph
     * @param out an OutputStream in which the graph will be saved
     * @throws IOException if the graph cannot be written to the stream
     */
    public void buildBpopGraph(OntologyLoader loader, IRI classTerm, OutputStream out) throws IOException {
        // try-with-resources guarantees the generator is closed (and its buffer
        // flushed) even when an ontology lookup or a write fails part-way through;
        // the original code leaked the generator on any exception.
        try (JsonGenerator jsonGenerator = new JsonFactory().createGenerator(out)) {
            // Start the json object
            jsonGenerator.writeStartObject();
            // "nodes" section: every term that will be represented in the graph
            jsonGenerator.writeArrayFieldStart("nodes");
            addParentsNodes(classTerm, loader, jsonGenerator);
            jsonGenerator.writeEndArray();
            // "edges" section: relationships between the nodes declared above
            jsonGenerator.writeArrayFieldStart("edges");
            addParentsEdges(classTerm, loader, jsonGenerator);
            jsonGenerator.writeEndArray();
            // Close the json object and push everything to the OutputStream
            jsonGenerator.writeEndObject();
            jsonGenerator.flush();
        }
    }

    /**
     * Recursively writes one {"IRI": ...} node object for every descendant of
     * the given term.
     *
     * @return the same jsonGenerator instance, for call chaining
     */
    public JsonGenerator addChildrenNodes(IRI term, OntologyLoader loader, JsonGenerator jsonGenerator) throws IOException {
        Collection<IRI> directChildren = loader.getDirectChildTerms(term);
        for (IRI directChild : directChildren) {
            jsonGenerator.writeStartObject();
            jsonGenerator.writeStringField("IRI", directChild.toString());
            jsonGenerator.writeEndObject();
            // recurse to cover the whole subtree below this child
            jsonGenerator = addChildrenNodes(directChild, loader, jsonGenerator);
        }
        return jsonGenerator;
    }

    /**
     * Recursively writes one is_a edge object for every parent/child pair in
     * the subtree below the given term.
     *
     * NOTE(review): edges are emitted with sub=parent and obj=child, which is
     * inverted with respect to the bbop example in the class documentation
     * (where the child is the "sub" of an is_a edge) — confirm with consumers.
     *
     * @return the same jsonGenerator instance, for call chaining
     */
    public JsonGenerator addChildrenEdges(IRI term, OntologyLoader loader, JsonGenerator jsonGenerator) throws IOException {
        Collection<IRI> directChildren = loader.getDirectChildTerms(term);
        for (IRI directChild : directChildren) {
            jsonGenerator.writeStartObject();
            jsonGenerator.writeStringField("pred", "is_a");
            jsonGenerator.writeStringField("obj", directChild.toString());
            jsonGenerator.writeStringField("sub", term.toString());
            jsonGenerator.writeEndObject();
            jsonGenerator = addChildrenEdges(directChild, loader, jsonGenerator);
        }
        return jsonGenerator;
    }

    /**
     * Gets all the parents and siblings of the given term and, using the
     * jsonGenerator, adds one node description per term to the json, e.g.:
     * {"IRI":"http://www.ebi.ac.uk/efo/EFO_0000510"},
     * {"IRI":"http://www.ebi.ac.uk/efo/EFO_0000506"},
     * ... etc.
     * This method is recursive: it calls itself on every found parent.
     *
     * @param term the IRI of the term for which you want all parent and sibling nodes
     * @param loader an ontology loader for the ontology you're interested in
     * @param jsonGenerator a jsonGenerator object
     * @return the same jsonGenerator instance, for call chaining
     * @throws IOException if a write fails
     */
    public JsonGenerator addParentsNodes(IRI term, OntologyLoader loader, JsonGenerator jsonGenerator) throws IOException {
        Collection<IRI> directParents = loader.getDirectParentTerms(term);
        // For each parent
        for (IRI directParent : directParents) {
            // add the node using the jsonGenerator object
            jsonGenerator.writeStartObject();
            jsonGenerator.writeStringField("IRI", directParent.toString());
            jsonGenerator.writeEndObject();
            // add the siblings of the given term under this parent
            jsonGenerator = addSiblingNodes(directParent, term, loader, jsonGenerator);
            // recurse up the hierarchy through this parent
            jsonGenerator = addParentsNodes(directParent, loader, jsonGenerator);
        }
        return jsonGenerator;
    }

    /**
     * Gets all the parents and siblings of the given term and, using the
     * jsonGenerator, adds one edge description per relationship, e.g.:
     * {
     *   "pred":"is_a",
     *   "obj":"http://www.ebi.ac.uk/efo/EFO_0004019",
     *   "sub":"http://www.ebi.ac.uk/efo/EFO_0000510"
     * }
     * ... etc.
     * This method is recursive: it calls itself on every found parent.
     *
     * NOTE(review): here the child term is written as "obj" and the parent as
     * "sub"; this matches the example in this javadoc but is the reverse of the
     * bbop example in the class documentation — confirm the intended orientation.
     *
     * @param term the IRI of the term for which you want all parent and sibling edges
     * @param loader an ontology loader for the ontology you're interested in
     * @param jsonGenerator a jsonGenerator object
     * @return the same jsonGenerator instance, for call chaining
     * @throws IOException if a write fails
     */
    public JsonGenerator addParentsEdges(IRI term, OntologyLoader loader, JsonGenerator jsonGenerator) throws IOException {
        Collection<IRI> directParents = loader.getDirectParentTerms(term);
        for (IRI directParent : directParents) {
            jsonGenerator.writeStartObject();
            jsonGenerator.writeStringField("pred", "is_a");
            jsonGenerator.writeStringField("obj", term.toString());
            jsonGenerator.writeStringField("sub", directParent.toString());
            jsonGenerator.writeEndObject();
            jsonGenerator = addSiblingEdges(directParent, term, loader, jsonGenerator);
            jsonGenerator = addParentsEdges(directParent, loader, jsonGenerator);
        }
        return jsonGenerator;
    }

    /**
     * Writes one {"IRI": ...} node object for every child of {@code parent}
     * other than {@code child} itself (i.e. the siblings of {@code child}).
     *
     * @return the same jsonGenerator instance, for call chaining
     */
    public JsonGenerator addSiblingNodes(IRI parent, IRI child, OntologyLoader loader, JsonGenerator jsonGenerator) throws IOException {
        Collection<IRI> directChildren = loader.getDirectChildTerms(parent);
        for (IRI directChild : directChildren) {
            if (!directChild.equals(child)) {
                jsonGenerator.writeStartObject();
                jsonGenerator.writeStringField("IRI", directChild.toString());
                jsonGenerator.writeEndObject();
            }
        }
        return jsonGenerator;
    }

    /**
     * Writes one is_a edge object linking {@code parent} to each sibling of
     * {@code child} (sub=parent, obj=sibling — see orientation NOTE above on
     * {@link #addParentsEdges}).
     *
     * @return the same jsonGenerator instance, for call chaining
     */
    public JsonGenerator addSiblingEdges(IRI parent, IRI child, OntologyLoader loader, JsonGenerator jsonGenerator) throws IOException {
        Collection<IRI> directChildren = loader.getDirectChildTerms(parent);
        for (IRI directChild : directChildren) {
            if (!directChild.equals(child)) {
                jsonGenerator.writeStartObject();
                jsonGenerator.writeStringField("pred", "is_a");
                jsonGenerator.writeStringField("obj", directChild.toString());
                jsonGenerator.writeStringField("sub", parent.toString());
                jsonGenerator.writeEndObject();
            }
        }
        return jsonGenerator;
    }
}
| |
/*
* Copyright (c) 2014-2016 Manuel Mauky
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package eu.lestard.advanced_bindings.api;
import javafx.beans.value.ObservableDoubleValue;
import javafx.beans.value.ObservableFloatValue;
import javafx.beans.value.ObservableIntegerValue;
import org.junit.Test;
import static eu.lestard.advanced_bindings.api.MathBindingsTestHelper.*;
/**
* This test verifies the behaviour of all bindings from {@link eu.lestard.advanced_bindings.api.MathBindings} class.
*
* The purpose of this test class is NOT to verify the calculation logic of {@link java.lang.Math}
* but to only verify that the created bindings are behaving correctly.
*
* Therefore I only verify that every binding has the same value as the corresponding method of the Math class with
* some
* example values.
*/
public class MathBindingsTest {
@Test
public void testAbs() {
testDoubleArgBinding(MathBindings::abs, Math::abs, 10.13, -23.4);
testFloatArgBinding(MathBindings::abs, Math::abs, 10.13f, -23.4f);
testIntegerArgBinding(MathBindings::abs, Math::abs, 20, -13);
testLongArgBinding(MathBindings::abs, Math::abs, 20l, -13l);
}
@Test
public void testAddExact() {
testIntegerBinding2Args1(MathBindings::addExact, Math::addExact, new Args<>(12, 4), new Args<>(0, 1), new Args<>(-123, 234));
testIntegerBinding2Args2(MathBindings::addExact, Math::addExact, new Args<>(12, 4), new Args<>(0, 1), new Args<>(-123, 234));
testIntegerBinding2Args3(MathBindings::addExact, Math::addExact, new Args<>(12, 4), new Args<>(0, 1), new Args<>(-123, 234));
testLongBinding2Args1(MathBindings::addExact, Math::addExact, new Args<>(12l, 4l), new Args<>(0l, 1l), new Args<>(-123l, 234l));
testLongBinding2Args2(MathBindings::addExact, Math::addExact, new Args<>(12l, 4l), new Args<>(0l, 1l), new Args<>(-123l, 234l));
testLongBinding2Args3(MathBindings::addExact, Math::addExact, new Args<>(12l, 4l), new Args<>(0l, 1l), new Args<>(-123l, 234l));
}
    /** Checks the acos binding against {@link Math#acos}, including NaN and the out-of-domain value 1.2. */
    @Test
    public void testAcos() {
        testDoubleArgBinding(MathBindings::acos, Math::acos, 0d, -1d, 1d, Double.NaN, 1.2);
    }
    /** Checks the asin binding against {@link Math#asin}, including out-of-domain values and NaN. */
    @Test
    public void testAsin() {
        testDoubleArgBinding(MathBindings::asin, Math::asin, Math.PI, -Math.PI, 0d, 1d, -1d, 1.45, Double.NaN);
    }
    /** Checks the atan binding against {@link Math#atan} for sample values including NaN. */
    @Test
    public void testAtan() {
        testDoubleArgBinding(MathBindings::atan, Math::atan, Math.PI, -Math.PI, 0d, 1d, -1d, 1.45, Double.NaN);
    }
    /**
     * Checks the atan2 bindings against {@link Math#atan2} for ordinary values,
     * all infinity combinations and NaN. The same argument set is run through
     * the three two-arg helper variants (presumably varying which of the two
     * arguments is an observable — see MathBindingsTestHelper).
     */
    @Test
    public void testAtan2() {
        testDoubleBinding2Args1(MathBindings::atan2, Math::atan2,
            new Args<>(1.2, 3.2),
            new Args<>(0d, 3.2),
            new Args<>(Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY),
            new Args<>(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY),
            new Args<>(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY),
            new Args<>(Double.NEGATIVE_INFINITY, Double.NEGATIVE_INFINITY),
            new Args<>(Double.NaN, 1.2));
        testDoubleBinding2Args2(MathBindings::atan2, Math::atan2,
            new Args<>(1.2, 3.2),
            new Args<>(0d, 3.2),
            new Args<>(Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY),
            new Args<>(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY),
            new Args<>(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY),
            new Args<>(Double.NEGATIVE_INFINITY, Double.NEGATIVE_INFINITY),
            new Args<>(Double.NaN, 1.2));
        testDoubleBinding2Args3(MathBindings::atan2, Math::atan2,
            new Args<>(1.2, 3.2),
            new Args<>(0d, 3.2),
            new Args<>(Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY),
            new Args<>(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY),
            new Args<>(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY),
            new Args<>(Double.NEGATIVE_INFINITY, Double.NEGATIVE_INFINITY),
            new Args<>(Double.NaN, 1.2));
    }
    /** Checks the cbrt binding against {@link Math#cbrt}, including NaN and both infinities. */
    @Test
    public void testCbrt() {
        testDoubleArgBinding(MathBindings::cbrt, Math::cbrt, 0d, -12.3, 12.3, Double.NaN, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
    }
    /** Checks the ceil binding against {@link Math#ceil}, including NaN and both infinities. */
    @Test
    public void testCeil() {
        testDoubleArgBinding(MathBindings::ceil, Math::ceil, 0d, 0.14, -0.14, 1d, 14d, 12.3, -12.4, Double.NaN, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
    }
    /**
     * Checks the copySign bindings against {@link Math#copySign} for the double
     * and float overloads, including NaN magnitudes, via the three two-arg
     * helper variants.
     */
    @Test
    public void testCopySign() {
        testDoubleBinding2Args1(MathBindings::copySign, Math::copySign, new Args<>(-12.3, 123.2), new Args<>(12.3, -123.2), new Args<>(Double.NaN, 123.2));
        testDoubleBinding2Args2(MathBindings::copySign, Math::copySign, new Args<>(-12.3, 123.2), new Args<>(12.3, -123.2), new Args<>(Double.NaN, 123.2));
        testDoubleBinding2Args3(MathBindings::copySign, Math::copySign, new Args<>(-12.3, 123.2), new Args<>(12.3, -123.2), new Args<>(Double.NaN, 123.2));
        testFloatBinding2Args1(MathBindings::copySign, Math::copySign, new Args<>(-12.3f, 123.2f), new Args<>(12.3f, -123.2f), new Args<>(Float.NaN, 123.2f));
        testFloatBinding2Args2(MathBindings::copySign, Math::copySign, new Args<>(-12.3f, 123.2f), new Args<>(12.3f, -123.2f), new Args<>(Float.NaN, 123.2f));
        testFloatBinding2Args3(MathBindings::copySign, Math::copySign, new Args<>(-12.3f, 123.2f), new Args<>(12.3f, -123.2f), new Args<>(Float.NaN, 123.2f));
    }
    /** Checks the cos binding against {@link Math#cos} for sample values including NaN. */
    @Test
    public void testCos() {
        testDoubleArgBinding(MathBindings::cos, Math::cos, 0d, 1d, -1d, Math.PI, -Math.PI, Double.NaN);
    }
    /** Checks the cosh binding against {@link Math#cosh}, including NaN and both infinities. */
    @Test
    public void testCosh() {
        testDoubleArgBinding(MathBindings::cosh, Math::cosh, 0d, 1d, -1d, Math.PI, -Math.PI, Double.NaN, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
    }
@Test
public void testDecrementExact() {
testIntegerArgBinding(MathBindings::decrementExact, Math::decrementExact, -10, -1, 0, 1, 10);
testLongArgBinding(MathBindings::decrementExact, Math::decrementExact, -10l, -1l, 0l, 1l, 10l);
}
    /** Checks the exp binding against {@link Math#exp}, including NaN and both infinities. */
    @Test
    public void testExp() {
        testDoubleArgBinding(MathBindings::exp, Math::exp, -1d, 0d, 1d, 10d, 13.402, Double.NaN, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
    }
    /** Checks the expm1 binding against {@link Math#expm1}, including NaN and both infinities. */
    @Test
    public void testExpm1() {
        testDoubleArgBinding(MathBindings::expm1, Math::expm1, -1d, 0d, 1d, 10d, 13.402, Double.NaN, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
    }
    /** Checks the floor binding against {@link Math#floor}, including NaN and both infinities. */
    @Test
    public void testFloor() {
        testDoubleArgBinding(MathBindings::floor, Math::floor, -12.13, -1.23, -1d, 0d, 1d, 1.23, 1.98, Double.NaN, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
    }
@Test
public void testFloorDiv() {
testIntegerBinding2Args1(MathBindings::floorDiv, Math::floorDiv, new Args<>(4, 3), new Args<>(-4, 3));
testIntegerBinding2Args2(MathBindings::floorDiv, Math::floorDiv, new Args<>(4, 3), new Args<>(-4, 3));
testIntegerBinding2Args3(MathBindings::floorDiv, Math::floorDiv, new Args<>(4, 3), new Args<>(-4, 3));
testLongBinding2Args1(MathBindings::floorDiv, Math::floorDiv, new Args<>(4l, 3l), new Args<>(-4l, 3l));
testLongBinding2Args2(MathBindings::floorDiv, Math::floorDiv, new Args<>(4l, 3l), new Args<>(-4l, 3l));
testLongBinding2Args3(MathBindings::floorDiv, Math::floorDiv, new Args<>(4l, 3l), new Args<>(-4l, 3l));
}
/** floorMod binding vs Math.floorMod, including a negative divisor case. */
@Test
public void testFloorMod() {
testIntegerBinding2Args1(MathBindings::floorMod, Math::floorMod, new Args<>(4, 3), new Args<>(-4, 3), new Args<>(4, -3));
testIntegerBinding2Args2(MathBindings::floorMod, Math::floorMod, new Args<>(4, 3), new Args<>(-4, 3), new Args<>(4, -3));
testIntegerBinding2Args3(MathBindings::floorMod, Math::floorMod, new Args<>(4, 3), new Args<>(-4, 3), new Args<>(4, -3));
// Use uppercase 'L' suffix: lowercase 'l' is easily confused with the digit '1'.
testLongBinding2Args1(MathBindings::floorMod, Math::floorMod, new Args<>(4L, 3L), new Args<>(-4L, 3L), new Args<>(4L, -3L));
testLongBinding2Args2(MathBindings::floorMod, Math::floorMod, new Args<>(4L, 3L), new Args<>(-4L, 3L), new Args<>(4L, -3L));
testLongBinding2Args3(MathBindings::floorMod, Math::floorMod, new Args<>(4L, 3L), new Args<>(-4L, 3L), new Args<>(4L, -3L));
}
/** getExponent binding vs Math.getExponent for double and float observables. */
@Test
public void testGetExponent() {
testDoubleArgBinding(MathBindings::getExponent, Math::getExponent, 2.1, 2.34, Double.NaN, Double.POSITIVE_INFINITY, 0d);
testFloatArgBinding(MathBindings::getExponent, Math::getExponent, 2.1f, 2.34f, Float.NaN, Float.POSITIVE_INFINITY, 0f);
}
/** hypot binding vs Math.hypot, including infinite and NaN coordinates. */
@Test
public void testHypot() {
testDoubleBinding2Args1(MathBindings::hypot, Math::hypot, new Args<>(1.2, 0.2), new Args<>(Double.POSITIVE_INFINITY, 0.2), new Args<>(1.2, Double.NEGATIVE_INFINITY), new Args<>(Double.NaN, 0.2));
testDoubleBinding2Args2(MathBindings::hypot, Math::hypot, new Args<>(1.2, 0.2), new Args<>(Double.POSITIVE_INFINITY, 0.2), new Args<>(1.2, Double.NEGATIVE_INFINITY), new Args<>(Double.NaN, 0.2));
testDoubleBinding2Args3(MathBindings::hypot, Math::hypot, new Args<>(1.2, 0.2), new Args<>(Double.POSITIVE_INFINITY, 0.2), new Args<>(1.2, Double.NEGATIVE_INFINITY), new Args<>(Double.NaN, 0.2));
}
/**
 * IEEEremainder binding vs Math.IEEEremainder across zero, infinite and NaN operands.
 * Fix: the third variant's first tuple was (1.2, 0d), inconsistent with the first two
 * variants and a duplicate of its own sixth tuple; restored to (1.2, 0.2).
 */
@Test
public void testIEEEremainder() {
testDoubleBinding2Args1(MathBindings::IEEEremainder, Math::IEEEremainder,
new Args<>(1.2, 0.2),
new Args<>(Double.POSITIVE_INFINITY, 12.2),
new Args<>(Double.NEGATIVE_INFINITY, 1.2),
new Args<>(1.2, Double.POSITIVE_INFINITY),
new Args<>(1.2, Double.NEGATIVE_INFINITY),
new Args<>(1.2, 0d),
new Args<>(0d, 1.2),
new Args<>(Double.NaN, 1.2),
new Args<>(1.2, Double.NaN));
testDoubleBinding2Args2(MathBindings::IEEEremainder, Math::IEEEremainder,
new Args<>(1.2, 0.2),
new Args<>(Double.POSITIVE_INFINITY, 12.2),
new Args<>(Double.NEGATIVE_INFINITY, 1.2),
new Args<>(1.2, Double.POSITIVE_INFINITY),
new Args<>(1.2, Double.NEGATIVE_INFINITY),
new Args<>(1.2, 0d),
new Args<>(0d, 1.2),
new Args<>(Double.NaN, 1.2),
new Args<>(1.2, Double.NaN));
testDoubleBinding2Args3(MathBindings::IEEEremainder, Math::IEEEremainder,
new Args<>(1.2, 0.2),
new Args<>(Double.POSITIVE_INFINITY, 12.2),
new Args<>(Double.NEGATIVE_INFINITY, 1.2),
new Args<>(1.2, Double.POSITIVE_INFINITY),
new Args<>(1.2, Double.NEGATIVE_INFINITY),
new Args<>(1.2, 0d),
new Args<>(0d, 1.2),
new Args<>(Double.NaN, 1.2),
new Args<>(1.2, Double.NaN));
}
/** incrementExact binding vs Math.incrementExact for int and long observables. */
@Test
public void testIncrementExact() {
testIntegerArgBinding(MathBindings::incrementExact, Math::incrementExact, -10, -1, 0, 1, 10);
// Use uppercase 'L' suffix: lowercase 'l' is easily confused with the digit '1'.
testLongArgBinding(MathBindings::incrementExact, Math::incrementExact, -10L, -1L, 0L, 1L, 10L);
}
/** log binding vs Math.log, including negative input, NaN and infinities. */
@Test
public void testLog() {
testDoubleArgBinding(MathBindings::log, Math::log, -12.3, -1d, 0d, 1d, 12.3, Double.NaN, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
}
/** log10 binding vs Math.log10, including an exact power of ten (100). */
@Test
public void testLog10() {
testDoubleArgBinding(MathBindings::log10, Math::log10, -12.3, -1d, 0d, 1d, 12.3, 100d, Double.NaN, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
}
/** log1p binding vs Math.log1p, including NaN and infinities. */
@Test
public void testLog1p() {
testDoubleArgBinding(MathBindings::log1p, Math::log1p, -12.3, -1d, 0d, 1d, 12.3, Double.NaN, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
}
/** max binding vs Math.max for int, long, double and float, including equal operands. */
@Test
public void testMax() {
testIntegerBinding2Args1(MathBindings::max, Math::max, new Args<>(4, 3), new Args<>(-4, 3), new Args<>(4, 4));
testIntegerBinding2Args2(MathBindings::max, Math::max, new Args<>(4, 3), new Args<>(-4, 3), new Args<>(4, 4));
testIntegerBinding2Args3(MathBindings::max, Math::max, new Args<>(4, 3), new Args<>(-4, 3), new Args<>(4, 4));
// Use uppercase 'L' suffix: lowercase 'l' is easily confused with the digit '1'.
testLongBinding2Args1(MathBindings::max, Math::max, new Args<>(4L, 3L), new Args<>(-4L, 3L), new Args<>(4L, 4L));
testLongBinding2Args2(MathBindings::max, Math::max, new Args<>(4L, 3L), new Args<>(-4L, 3L), new Args<>(4L, 4L));
testLongBinding2Args3(MathBindings::max, Math::max, new Args<>(4L, 3L), new Args<>(-4L, 3L), new Args<>(4L, 4L));
testDoubleBinding2Args1(MathBindings::max, Math::max, new Args<>(4.3, 3.1), new Args<>(-4.3, 3.1), new Args<>(4.3, 4.3));
testDoubleBinding2Args2(MathBindings::max, Math::max, new Args<>(4.3, 3.1), new Args<>(-4.3, 3.1), new Args<>(4.3, 4.3));
testDoubleBinding2Args3(MathBindings::max, Math::max, new Args<>(4.3, 3.1), new Args<>(-4.3, 3.1), new Args<>(4.3, 4.3));
testFloatBinding2Args1(MathBindings::max, Math::max, new Args<>(4.3f, 3.1f), new Args<>(-4.3f, 3.1f), new Args<>(4.3f, 4.3f));
testFloatBinding2Args2(MathBindings::max, Math::max, new Args<>(4.3f, 3.1f), new Args<>(-4.3f, 3.1f), new Args<>(4.3f, 4.3f));
testFloatBinding2Args3(MathBindings::max, Math::max, new Args<>(4.3f, 3.1f), new Args<>(-4.3f, 3.1f), new Args<>(4.3f, 4.3f));
}
/** min binding vs Math.min for int, long, double and float, including equal operands. */
@Test
public void testMin() {
testIntegerBinding2Args1(MathBindings::min, Math::min, new Args<>(4, 3), new Args<>(-4, 3), new Args<>(4, 4));
testIntegerBinding2Args2(MathBindings::min, Math::min, new Args<>(4, 3), new Args<>(-4, 3), new Args<>(4, 4));
testIntegerBinding2Args3(MathBindings::min, Math::min, new Args<>(4, 3), new Args<>(-4, 3), new Args<>(4, 4));
// Use uppercase 'L' suffix: lowercase 'l' is easily confused with the digit '1'.
testLongBinding2Args1(MathBindings::min, Math::min, new Args<>(4L, 3L), new Args<>(-4L, 3L), new Args<>(4L, 4L));
testLongBinding2Args2(MathBindings::min, Math::min, new Args<>(4L, 3L), new Args<>(-4L, 3L), new Args<>(4L, 4L));
testLongBinding2Args3(MathBindings::min, Math::min, new Args<>(4L, 3L), new Args<>(-4L, 3L), new Args<>(4L, 4L));
testDoubleBinding2Args1(MathBindings::min, Math::min, new Args<>(4.3, 3.1), new Args<>(-4.3, 3.1), new Args<>(4.3, 4.3));
testDoubleBinding2Args2(MathBindings::min, Math::min, new Args<>(4.3, 3.1), new Args<>(-4.3, 3.1), new Args<>(4.3, 4.3));
testDoubleBinding2Args3(MathBindings::min, Math::min, new Args<>(4.3, 3.1), new Args<>(-4.3, 3.1), new Args<>(4.3, 4.3));
testFloatBinding2Args1(MathBindings::min, Math::min, new Args<>(4.3f, 3.1f), new Args<>(-4.3f, 3.1f), new Args<>(4.3f, 4.3f));
testFloatBinding2Args2(MathBindings::min, Math::min, new Args<>(4.3f, 3.1f), new Args<>(-4.3f, 3.1f), new Args<>(4.3f, 4.3f));
testFloatBinding2Args3(MathBindings::min, Math::min, new Args<>(4.3f, 3.1f), new Args<>(-4.3f, 3.1f), new Args<>(4.3f, 4.3f));
}
/** multiplyExact binding vs Math.multiplyExact, including a zero operand. */
@Test
public void testMultiplyExact() {
testIntegerBinding2Args1(MathBindings::multiplyExact, Math::multiplyExact, new Args<>(4, 3), new Args<>(-4, 3), new Args<>(0, 4));
testIntegerBinding2Args2(MathBindings::multiplyExact, Math::multiplyExact, new Args<>(4, 3), new Args<>(-4, 3), new Args<>(0, 4));
testIntegerBinding2Args3(MathBindings::multiplyExact, Math::multiplyExact, new Args<>(4, 3), new Args<>(-4, 3), new Args<>(0, 4));
// Use uppercase 'L' suffix: lowercase 'l' is easily confused with the digit '1'.
testLongBinding2Args1(MathBindings::multiplyExact, Math::multiplyExact, new Args<>(4L, 3L), new Args<>(-4L, 3L), new Args<>(0L, 4L));
testLongBinding2Args2(MathBindings::multiplyExact, Math::multiplyExact, new Args<>(4L, 3L), new Args<>(-4L, 3L), new Args<>(0L, 4L));
testLongBinding2Args3(MathBindings::multiplyExact, Math::multiplyExact, new Args<>(4L, 3L), new Args<>(-4L, 3L), new Args<>(0L, 4L));
}
/** negateExact binding vs Math.negateExact for int and long observables. */
@Test
public void testNegateExact() {
testIntegerArgBinding(MathBindings::negateExact, Math::negateExact, -10, -1, 0, 1, 10);
// Use uppercase 'L' suffix: lowercase 'l' is easily confused with the digit '1'.
testLongArgBinding(MathBindings::negateExact, Math::negateExact, -10L, -1L, 0L, 1L, 10L);
}
/**
 * nextAfter binding vs Math.nextAfter for double and float, covering NaN direction,
 * MIN_VALUE, NEGATIVE_INFINITY and signed-zero operands.
 */
@Test
public void testNextAfter() {
testDoubleBinding2Args1(MathBindings::nextAfter, Math::nextAfter,
new Args<>(1.2, 0.3),
new Args<>(1.2, Double.NaN),
new Args<>(Double.MIN_VALUE, -0.3),
new Args<>(Double.NEGATIVE_INFINITY, -0.3),
new Args<>(-0d, +0d));
testDoubleBinding2Args2(MathBindings::nextAfter, Math::nextAfter,
new Args<>(1.2, 0.3),
new Args<>(1.2, Double.NaN),
new Args<>(Double.MIN_VALUE, -0.3),
new Args<>(Double.NEGATIVE_INFINITY, -0.3),
new Args<>(-0d, +0d));
testDoubleBinding2Args3(MathBindings::nextAfter, Math::nextAfter,
new Args<>(1.2, 0.3),
new Args<>(1.2, Double.NaN),
new Args<>(Double.MIN_VALUE, -0.3),
new Args<>(Double.NEGATIVE_INFINITY, -0.3),
new Args<>(-0d, +0d));
testFloatBinding2Args1(MathBindings::nextAfter, Math::nextAfter,
new Args<>(1.2f, 0.3f),
new Args<>(1.2f, Float.NaN),
new Args<>(Float.MIN_VALUE, -0.3f),
new Args<>(Float.NEGATIVE_INFINITY, -0.3f),
new Args<>(-0f, +0f));
testFloatBinding2Args2(MathBindings::nextAfter, Math::nextAfter,
new Args<>(1.2f, 0.3f),
new Args<>(1.2f, Float.NaN),
new Args<>(Float.MIN_VALUE, -0.3f),
new Args<>(Float.NEGATIVE_INFINITY, -0.3f),
new Args<>(-0f, +0f));
testFloatBinding2Args3(MathBindings::nextAfter, Math::nextAfter,
new Args<>(1.2f, 0.3f),
new Args<>(1.2f, Float.NaN),
new Args<>(Float.MIN_VALUE, -0.3f),
new Args<>(Float.NEGATIVE_INFINITY, -0.3f),
new Args<>(-0f, +0f));
}
/** nextDown binding vs Math.nextDown for double and float observables. */
@Test
public void testNextDown() {
testDoubleArgBinding(MathBindings::nextDown, Math::nextDown, -12.3, -1d, 0d, 1d, 12.3, Double.NaN, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
testFloatArgBinding(MathBindings::nextDown, Math::nextDown, -12.3f, -1f, 0f, 1f, 12.3f, Float.NaN, Float.NEGATIVE_INFINITY, Float.POSITIVE_INFINITY);
}
/** nextUp binding vs Math.nextUp for double and float observables. */
@Test
public void testNextUp() {
testDoubleArgBinding(MathBindings::nextUp, Math::nextUp, -12.3, -1d, 0d, 1d, 12.3, Double.NaN, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
testFloatArgBinding(MathBindings::nextUp, Math::nextUp, -12.3f, -1f, 0f, 1f, 12.3f, Float.NaN, Float.NEGATIVE_INFINITY, Float.POSITIVE_INFINITY);
}
/** pow binding vs Math.pow, covering zero exponent, NaN operands and infinite exponent. */
@Test
public void testPow() {
testDoubleBinding2Args1(MathBindings::pow, Math::pow,
new Args<>(1.3, 3.2),
new Args<>(1.3, 0d),
new Args<>(1d, 3.2),
new Args<>(1.3, Double.NaN),
new Args<>(Double.NaN, 3.2),
new Args<>(Double.NaN, 0d),
new Args<>(1.1, Double.POSITIVE_INFINITY),
new Args<>(1d, 2d));
testDoubleBinding2Args2(MathBindings::pow, Math::pow,
new Args<>(1.3, 3.2),
new Args<>(1.3, 0d),
new Args<>(1d, 3.2),
new Args<>(1.3, Double.NaN),
new Args<>(Double.NaN, 3.2),
new Args<>(Double.NaN, 0d),
new Args<>(1.1, Double.POSITIVE_INFINITY),
new Args<>(1d, 2d));
testDoubleBinding2Args3(MathBindings::pow, Math::pow,
new Args<>(1.3, 3.2),
new Args<>(1.3, 0d),
new Args<>(1d, 3.2),
new Args<>(1.3, Double.NaN),
new Args<>(Double.NaN, 3.2),
new Args<>(Double.NaN, 0d),
new Args<>(1.1, Double.POSITIVE_INFINITY),
new Args<>(1d, 2d));
}
/** rint binding vs Math.rint, including the .5 tie case and infinities. */
@Test
public void testRint() {
testDoubleArgBinding(MathBindings::rint, Math::rint, -1d, 0d, 1d, 1.12, 1.89, 1.5, Double.NaN, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
}
/** round binding vs Math.round for double and float observables, including NaN. */
@Test
public void testRound() {
testDoubleArgBinding(MathBindings::round, Math::round, 1.34, 1.89, -12.34, -32.9, Double.NaN, 0d);
testFloatArgBinding(MathBindings::round, Math::round, 1.34f, 1.89f, -12.34f, -32.9f, Float.NaN, 0f);
}
/**
 * scalb binding vs Math.scalb for (float,int) and (double,int) argument pairs.
 * Explicit type witnesses on the helper calls are needed so the compiler picks
 * the intended generic overloads for the mixed-type method references.
 */
@SuppressWarnings("unchecked")
@Test
public void testScalb(){
MathBindingsTestHelper.<Float, Integer, ObservableFloatValue, ObservableIntegerValue, Number>
testTwoArgBinding1(MathBindings::scalb, Math::scalb, new Args<>(12f, 12), new Args<>(Float.NaN, 12), new Args<>(Float.POSITIVE_INFINITY, 3), new Args<>(0f, 3));
MathBindingsTestHelper.<Float, Integer, ObservableIntegerValue, Number>
testTwoArgBinding2(MathBindings::scalb, Math::scalb, new Args<>(12f, 12), new Args<>(Float.NaN, 12), new Args<>(Float.POSITIVE_INFINITY, 3), new Args<>(0f, 3));
MathBindingsTestHelper.<Float, Integer, ObservableFloatValue, Number>
testTwoArgBinding3(MathBindings::scalb, Math::scalb, new Args<>(12f, 12), new Args<>(Float.NaN, 12), new Args<>(Float.POSITIVE_INFINITY, 3), new Args<>(0f, 3));
MathBindingsTestHelper.<Double, Integer, ObservableDoubleValue, ObservableIntegerValue, Number>
testTwoArgBinding1(MathBindings::scalb, Math::scalb, new Args<>(12d, 12), new Args<>(Double.NaN, 12), new Args<>(Double.POSITIVE_INFINITY, 3), new Args<>(0d, 3));
MathBindingsTestHelper.<Double, Integer, ObservableIntegerValue, Number>
testTwoArgBinding2(MathBindings::scalb, Math::scalb, new Args<>(12d, 12), new Args<>(Double.NaN, 12), new Args<>(Double.POSITIVE_INFINITY, 3), new Args<>(0d, 3));
MathBindingsTestHelper.<Double, Integer, ObservableDoubleValue, Number>
testTwoArgBinding3(MathBindings::scalb, Math::scalb, new Args<>(12d, 12), new Args<>(Double.NaN, 12), new Args<>(Double.POSITIVE_INFINITY, 3), new Args<>(0d, 3));
}
/**
 * signum binding vs Math.signum for double and float observables.
 * Fix: the sixth value was a duplicated -1.23; the symmetric positive value 1.23
 * was clearly intended (matching the -0.23 / 0.23 pair in the same list).
 */
@Test
public void testSignum() {
testDoubleArgBinding(MathBindings::signum, Math::signum, -1d, -1.23, -0.23, 0d, 0.23, 1.23, 1d, Double.NaN);
testFloatArgBinding(MathBindings::signum, Math::signum, -1f, -1.23f, -0.23f, 0f, 0.23f, 1.23f, 1f, Float.NaN);
}
/** sin binding vs Math.sin for unit values, +/-PI and NaN. */
@Test
public void testSin() {
testDoubleArgBinding(MathBindings::sin, Math::sin, -1d, 0d, 1d, Math.PI, -Math.PI, Double.NaN);
}
/** sinh binding vs Math.sinh for unit values, +/-PI and NaN. */
@Test
public void testSinh() {
testDoubleArgBinding(MathBindings::sinh, Math::sinh, -1d, 0d, 1d, Math.PI, -Math.PI, Double.NaN);
}
/** sqrt binding vs Math.sqrt, including negative input, NaN and infinities. */
@Test
public void testSqrt() {
testDoubleArgBinding(MathBindings::sqrt, Math::sqrt, -12d, -10.34, -1d, 0d, 1d, 10.34, 12d, Double.NaN, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
}
/** subtractExact binding vs Math.subtractExact, including a zero operand. */
@Test
public void testSubtractExact() {
testIntegerBinding2Args1(MathBindings::subtractExact, Math::subtractExact, new Args<>(4, 3), new Args<>(-4, 3), new Args<>(0, 4));
testIntegerBinding2Args2(MathBindings::subtractExact, Math::subtractExact, new Args<>(4, 3), new Args<>(-4, 3), new Args<>(0, 4));
testIntegerBinding2Args3(MathBindings::subtractExact, Math::subtractExact, new Args<>(4, 3), new Args<>(-4, 3), new Args<>(0, 4));
// Use uppercase 'L' suffix: lowercase 'l' is easily confused with the digit '1'.
testLongBinding2Args1(MathBindings::subtractExact, Math::subtractExact, new Args<>(4L, 3L), new Args<>(-4L, 3L), new Args<>(0L, 4L));
testLongBinding2Args2(MathBindings::subtractExact, Math::subtractExact, new Args<>(4L, 3L), new Args<>(-4L, 3L), new Args<>(0L, 4L));
testLongBinding2Args3(MathBindings::subtractExact, Math::subtractExact, new Args<>(4L, 3L), new Args<>(-4L, 3L), new Args<>(0L, 4L));
}
/** tan binding vs Math.tan for unit values, +/-PI and NaN. */
@Test
public void testTan() {
testDoubleArgBinding(MathBindings::tan, Math::tan, -1d, 0d, 1d, Math.PI, -Math.PI, Double.NaN);
}
/** tanh binding vs Math.tanh for unit values, +/-PI and NaN. */
@Test
public void testTanh() {
testDoubleArgBinding(MathBindings::tanh, Math::tanh, -1d, 0d, 1d, Math.PI, -Math.PI, Double.NaN);
}
/** toDegrees binding vs Math.toDegrees for common angles and NaN. */
@Test
public void testToDegrees() {
testDoubleArgBinding(MathBindings::toDegrees, Math::toDegrees, -1d, 0d, 1d, 90d, -90d, 180d, 360d, 359d, Double.NaN);
}
/** toIntExact binding vs Math.toIntExact for values within int range. */
@Test
public void testToIntExact() {
// Use uppercase 'L' suffix: lowercase 'l' is easily confused with the digit '1'.
testLongArgBinding(MathBindings::toIntExact, Math::toIntExact, -1L, 0L, 302L);
}
/** toRadians binding vs Math.toRadians for common angles and NaN. */
@Test
public void testToRadians() {
testDoubleArgBinding(MathBindings::toRadians, Math::toRadians, -1d, 0d, 1d, Math.PI, 2 * Math.PI, Double.NaN);
}
/** ulp binding vs Math.ulp, covering MIN/MAX_VALUE, NaN and infinities. */
@Test
public void testUlp() {
testDoubleArgBinding(MathBindings::ulp, Math::ulp, 0d, 1d, 102.3, -102.3, Double.MAX_VALUE, Double.MIN_VALUE, Double.NaN, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
testFloatArgBinding(MathBindings::ulp, Math::ulp, 0f, 1f, 102.3f, -102.3f, Float.MAX_VALUE, Float.MIN_VALUE, Float.NaN, Float.POSITIVE_INFINITY, Float.NEGATIVE_INFINITY);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sanselan.formats.icns;
import java.io.IOException;
import java.util.ArrayList;
import org.apache.sanselan.ImageReadException;
import com.google.code.appengine.awt.image.BufferedImage;
/**
 * Decodes the individual image elements of an Apple ICNS icon file into
 * BufferedImage instances, applying the companion transparency masks.
 *
 * Fix: corrected the grammatically broken exception message
 * "Unsupport mask bit depth" -> "Unsupported mask bit depth".
 */
public class IcnsDecoder
{
    // ARGB values used for 4 bpp icon data (16-color palette, index = pixel value).
    private static final int[] palette_4bpp =
    {
        0xffffffff,
        0xfffcf305,
        0xffff6402,
        0xffdd0806,
        0xfff20884,
        0xff4600a5,
        0xff0000d4,
        0xff02abea,
        0xff1fb714,
        0xff006411,
        0xff562c05,
        0xff90713a,
        0xffc0c0c0,
        0xff808080,
        0xff404040,
        0xff000000
    };

    // ARGB values used for 8 bpp icon data: a 6-level RGB color cube (pure black
    // omitted from the cube) followed by 10-step red, green, blue and gray ramps,
    // with black as the final entry.
    private static final int[] palette_8bpp =
    {
        0xFFFFFFFF,
        0xFFFFFFCC,
        0xFFFFFF99,
        0xFFFFFF66,
        0xFFFFFF33,
        0xFFFFFF00,
        0xFFFFCCFF,
        0xFFFFCCCC,
        0xFFFFCC99,
        0xFFFFCC66,
        0xFFFFCC33,
        0xFFFFCC00,
        0xFFFF99FF,
        0xFFFF99CC,
        0xFFFF9999,
        0xFFFF9966,
        0xFFFF9933,
        0xFFFF9900,
        0xFFFF66FF,
        0xFFFF66CC,
        0xFFFF6699,
        0xFFFF6666,
        0xFFFF6633,
        0xFFFF6600,
        0xFFFF33FF,
        0xFFFF33CC,
        0xFFFF3399,
        0xFFFF3366,
        0xFFFF3333,
        0xFFFF3300,
        0xFFFF00FF,
        0xFFFF00CC,
        0xFFFF0099,
        0xFFFF0066,
        0xFFFF0033,
        0xFFFF0000,
        0xFFCCFFFF,
        0xFFCCFFCC,
        0xFFCCFF99,
        0xFFCCFF66,
        0xFFCCFF33,
        0xFFCCFF00,
        0xFFCCCCFF,
        0xFFCCCCCC,
        0xFFCCCC99,
        0xFFCCCC66,
        0xFFCCCC33,
        0xFFCCCC00,
        0xFFCC99FF,
        0xFFCC99CC,
        0xFFCC9999,
        0xFFCC9966,
        0xFFCC9933,
        0xFFCC9900,
        0xFFCC66FF,
        0xFFCC66CC,
        0xFFCC6699,
        0xFFCC6666,
        0xFFCC6633,
        0xFFCC6600,
        0xFFCC33FF,
        0xFFCC33CC,
        0xFFCC3399,
        0xFFCC3366,
        0xFFCC3333,
        0xFFCC3300,
        0xFFCC00FF,
        0xFFCC00CC,
        0xFFCC0099,
        0xFFCC0066,
        0xFFCC0033,
        0xFFCC0000,
        0xFF99FFFF,
        0xFF99FFCC,
        0xFF99FF99,
        0xFF99FF66,
        0xFF99FF33,
        0xFF99FF00,
        0xFF99CCFF,
        0xFF99CCCC,
        0xFF99CC99,
        0xFF99CC66,
        0xFF99CC33,
        0xFF99CC00,
        0xFF9999FF,
        0xFF9999CC,
        0xFF999999,
        0xFF999966,
        0xFF999933,
        0xFF999900,
        0xFF9966FF,
        0xFF9966CC,
        0xFF996699,
        0xFF996666,
        0xFF996633,
        0xFF996600,
        0xFF9933FF,
        0xFF9933CC,
        0xFF993399,
        0xFF993366,
        0xFF993333,
        0xFF993300,
        0xFF9900FF,
        0xFF9900CC,
        0xFF990099,
        0xFF990066,
        0xFF990033,
        0xFF990000,
        0xFF66FFFF,
        0xFF66FFCC,
        0xFF66FF99,
        0xFF66FF66,
        0xFF66FF33,
        0xFF66FF00,
        0xFF66CCFF,
        0xFF66CCCC,
        0xFF66CC99,
        0xFF66CC66,
        0xFF66CC33,
        0xFF66CC00,
        0xFF6699FF,
        0xFF6699CC,
        0xFF669999,
        0xFF669966,
        0xFF669933,
        0xFF669900,
        0xFF6666FF,
        0xFF6666CC,
        0xFF666699,
        0xFF666666,
        0xFF666633,
        0xFF666600,
        0xFF6633FF,
        0xFF6633CC,
        0xFF663399,
        0xFF663366,
        0xFF663333,
        0xFF663300,
        0xFF6600FF,
        0xFF6600CC,
        0xFF660099,
        0xFF660066,
        0xFF660033,
        0xFF660000,
        0xFF33FFFF,
        0xFF33FFCC,
        0xFF33FF99,
        0xFF33FF66,
        0xFF33FF33,
        0xFF33FF00,
        0xFF33CCFF,
        0xFF33CCCC,
        0xFF33CC99,
        0xFF33CC66,
        0xFF33CC33,
        0xFF33CC00,
        0xFF3399FF,
        0xFF3399CC,
        0xFF339999,
        0xFF339966,
        0xFF339933,
        0xFF339900,
        0xFF3366FF,
        0xFF3366CC,
        0xFF336699,
        0xFF336666,
        0xFF336633,
        0xFF336600,
        0xFF3333FF,
        0xFF3333CC,
        0xFF333399,
        0xFF333366,
        0xFF333333,
        0xFF333300,
        0xFF3300FF,
        0xFF3300CC,
        0xFF330099,
        0xFF330066,
        0xFF330033,
        0xFF330000,
        0xFF00FFFF,
        0xFF00FFCC,
        0xFF00FF99,
        0xFF00FF66,
        0xFF00FF33,
        0xFF00FF00,
        0xFF00CCFF,
        0xFF00CCCC,
        0xFF00CC99,
        0xFF00CC66,
        0xFF00CC33,
        0xFF00CC00,
        0xFF0099FF,
        0xFF0099CC,
        0xFF009999,
        0xFF009966,
        0xFF009933,
        0xFF009900,
        0xFF0066FF,
        0xFF0066CC,
        0xFF006699,
        0xFF006666,
        0xFF006633,
        0xFF006600,
        0xFF0033FF,
        0xFF0033CC,
        0xFF003399,
        0xFF003366,
        0xFF003333,
        0xFF003300,
        0xFF0000FF,
        0xFF0000CC,
        0xFF000099,
        0xFF000066,
        0xFF000033,
        0xFFEE0000,
        0xFFDD0000,
        0xFFBB0000,
        0xFFAA0000,
        0xFF880000,
        0xFF770000,
        0xFF550000,
        0xFF440000,
        0xFF220000,
        0xFF110000,
        0xFF00EE00,
        0xFF00DD00,
        0xFF00BB00,
        0xFF00AA00,
        0xFF008800,
        0xFF007700,
        0xFF005500,
        0xFF004400,
        0xFF002200,
        0xFF001100,
        0xFF0000EE,
        0xFF0000DD,
        0xFF0000BB,
        0xFF0000AA,
        0xFF000088,
        0xFF000077,
        0xFF000055,
        0xFF000044,
        0xFF000022,
        0xFF000011,
        0xFFEEEEEE,
        0xFFDDDDDD,
        0xFFBBBBBB,
        0xFFAAAAAA,
        0xFF888888,
        0xFF777777,
        0xFF555555,
        0xFF444444,
        0xFF222222,
        0xFF111111,
        0xFF000000
    };

    /**
     * Decodes 1 bpp icon data: bits are packed most-significant-bit first,
     * a set bit is opaque black, a clear bit opaque white.
     */
    private static void decode1BPPImage(IcnsType imageType, byte[] imageData,
            BufferedImage bufferedImage)
    {
        int position = 0;
        int bitsLeft = 0;
        int value = 0;
        for (int y = 0; y < imageType.getHeight(); y++)
        {
            for (int x = 0; x < imageType.getWidth(); x++)
            {
                if (bitsLeft == 0)
                {
                    value = 0xff & imageData[position++];
                    bitsLeft = 8;
                }
                int argb;
                if ((value & 0x80) != 0)
                    argb = 0xff000000; // set bit -> black
                else
                    argb = 0xffffffff; // clear bit -> white
                value <<= 1;
                bitsLeft--;
                bufferedImage.setRGB(x, y, argb);
            }
        }
    }

    /**
     * Decodes 4 bpp icon data: two palette indices per byte, high nibble first.
     */
    private static void decode4BPPImage(IcnsType imageType, byte[] imageData,
            BufferedImage bufferedImage)
    {
        int i = 0;
        boolean visited = false; // false -> take the high nibble next
        for (int y = 0; y < imageType.getHeight(); y++)
        {
            for (int x = 0; x < imageType.getWidth(); x++)
            {
                int index;
                if (!visited)
                    index = 0xf & (imageData[i] >> 4);
                else
                    index = 0xf & imageData[i++]; // low nibble, then advance
                visited = !visited;
                bufferedImage.setRGB(x, y, palette_4bpp[index]);
            }
        }
    }

    /** Decodes 8 bpp icon data: one palette index per byte, row-major order. */
    private static void decode8BPPImage(IcnsType imageType, byte[] imageData,
            BufferedImage bufferedImage)
    {
        for (int y = 0; y < imageType.getHeight(); y++)
        {
            for (int x = 0; x < imageType.getWidth(); x++)
            {
                int index = 0xff & imageData[y*imageType.getWidth() + x];
                bufferedImage.setRGB(x, y, palette_8bpp[index]);
            }
        }
    }

    /**
     * Decodes 32 bpp icon data stored as 4 bytes per pixel. The first byte
     * (nominal alpha) is ignored and the pixel forced opaque; transparency
     * comes from a separate mask (see apply1BPPMask / apply8BPPMask).
     */
    private static void decode32BPPImage(IcnsType imageType, byte[] imageData,
            BufferedImage bufferedImage)
    {
        for (int y = 0; y < imageType.getHeight(); y++)
        {
            for (int x = 0; x < imageType.getWidth(); x++)
            {
                int argb = 0xff000000 /* the "alpha" is ignored */ |
                        ((0xff & imageData[4*(y*imageType.getWidth() + x) + 1]) << 16) |
                        ((0xff & imageData[4*(y*imageType.getWidth() + x) + 2]) << 8) |
                        (0xff & imageData[4*(y*imageType.getWidth() + x) + 3]);
                bufferedImage.setRGB(x, y, argb);
            }
        }
    }

    /**
     * Applies the 1 bpp transparency mask that follows the 1 bpp image plane
     * within the same element: a set bit keeps the pixel opaque, a clear bit
     * makes it fully transparent.
     *
     * @throws ImageReadException if the element is too short to hold both planes
     */
    private static void apply1BPPMask(byte[] maskData, BufferedImage bufferedImage) throws ImageReadException
    {
        int position = 0;
        int bitsLeft = 0;
        int value = 0;

        // 1 bit icon types have image data followed by mask data in the same entry
        int totalBytes = (bufferedImage.getWidth() * bufferedImage.getHeight() + 7) / 8;
        if (maskData.length >= 2*totalBytes)
            position = totalBytes; // skip past the image plane to the mask plane
        else
            throw new ImageReadException("1 BPP mask underrun parsing ICNS file");

        for (int y = 0; y < bufferedImage.getHeight(); y++)
        {
            for (int x = 0; x < bufferedImage.getWidth(); x++)
            {
                if (bitsLeft == 0)
                {
                    value = 0xff & maskData[position++];
                    bitsLeft = 8;
                }
                int alpha;
                if ((value & 0x80) != 0)
                    alpha = 0xff;
                else
                    alpha = 0x00;
                value <<= 1;
                bitsLeft--;
                bufferedImage.setRGB(x, y, (alpha << 24) |
                        (0xffffff & bufferedImage.getRGB(x, y)));
            }
        }
    }

    /** Applies an 8 bpp alpha mask from a companion mask element, one byte per pixel. */
    private static void apply8BPPMask(byte[] maskData, BufferedImage bufferedImage)
    {
        for (int y = 0; y < bufferedImage.getHeight(); y++)
        {
            for (int x = 0; x < bufferedImage.getWidth(); x++)
            {
                int alpha = 0xff & maskData[y*bufferedImage.getWidth() + x];
                bufferedImage.setRGB(x, y, (alpha << 24) |
                        (0xffffff & bufferedImage.getRGB(x, y)));
            }
        }
    }

    /**
     * Decodes every recognized image element into a BufferedImage. For each
     * image, the best available mask is applied: 1 bpp images carry their own
     * mask; otherwise an 8 bpp mask element is preferred, falling back to a
     * 1 bpp mask element.
     *
     * @param icnsElements all elements parsed from the ICNS file
     * @return list of decoded BufferedImage instances (unrecognized and
     *         JPEG 2000-compressed elements are skipped)
     * @throws ImageReadException on corrupt or unsupported image data
     * @throws IOException declared for API compatibility
     */
    public static ArrayList decodeAllImages(IcnsImageParser.IcnsElement[] icnsElements)
            throws ImageReadException, IOException
    {
        ArrayList result = new ArrayList();
        for (int i = 0; i < icnsElements.length; i++)
        {
            IcnsImageParser.IcnsElement imageElement = icnsElements[i];
            IcnsType imageType = IcnsType.findImageType(imageElement.type);
            if (imageType == null)
                continue; // not an image element (or unknown type)

            IcnsType maskType = null;
            IcnsImageParser.IcnsElement maskElement = null;
            if (imageType.hasMask())
            {
                // The element embeds its own mask plane.
                maskType = imageType;
                maskElement = imageElement;
            }
            else
            {
                // Prefer an 8 bpp mask element, fall back to a 1 bpp one.
                maskType = IcnsType.find8BPPMaskType(imageType);
                if (maskType != null)
                {
                    for (int j = 0; j < icnsElements.length; j++)
                    {
                        if (icnsElements[j].type == maskType.getType())
                        {
                            maskElement = icnsElements[j];
                            break;
                        }
                    }
                }
                if (maskElement == null)
                {
                    maskType = IcnsType.find1BPPMaskType(imageType);
                    if (maskType != null)
                    {
                        for (int j = 0; j < icnsElements.length; j++)
                        {
                            if (icnsElements[j].type == maskType.getType())
                            {
                                maskElement = icnsElements[j];
                                break;
                            }
                        }
                    }
                }
            }

            // FIXME: don't skip these when JPEG 2000 support is added:
            if (imageType == IcnsType.ICNS_256x256_32BIT_ARGB_IMAGE ||
                    imageType == IcnsType.ICNS_512x512_32BIT_ARGB_IMAGE)
                continue;

            // Data shorter than the raw pixel size indicates RLE compression
            // (only defined for the 32 bit types).
            int expectedSize = (imageType.getWidth()*imageType.getHeight()*
                    imageType.getBitsPerPixel() + 7) / 8;
            byte[] imageData;
            if (imageElement.data.length < expectedSize)
            {
                if (imageType.getBitsPerPixel() == 32)
                {
                    imageData = Rle24Compression.decompress(imageType.getWidth(),
                            imageType.getHeight(), imageElement.data);
                }
                else
                    throw new ImageReadException(
                            "Short image data but not a 32 bit compressed type");
            }
            else
                imageData = imageElement.data;

            BufferedImage bufferedImage = new BufferedImage(imageType.getWidth(),
                    imageType.getHeight(), BufferedImage.TYPE_INT_ARGB);
            switch (imageType.getBitsPerPixel())
            {
                case 1:
                    decode1BPPImage(imageType, imageData, bufferedImage);
                    break;
                case 4:
                    decode4BPPImage(imageType, imageData, bufferedImage);
                    break;
                case 8:
                    decode8BPPImage(imageType, imageData, bufferedImage);
                    break;
                case 32:
                    decode32BPPImage(imageType, imageData, bufferedImage);
                    break;
                default:
                    throw new ImageReadException(
                            "Unsupported bit depth " + imageType.getBitsPerPixel());
            }

            if (maskElement != null)
            {
                if (maskType.getBitsPerPixel() == 1)
                    apply1BPPMask(maskElement.data, bufferedImage);
                else if (maskType.getBitsPerPixel() == 8)
                    apply8BPPMask(maskElement.data, bufferedImage);
                else
                    // Fixed message: was "Unsupport mask bit depth"
                    throw new ImageReadException("Unsupported mask bit depth " +
                            maskType.getBitsPerPixel());
            }
            result.add(bufferedImage);
        }
        return result;
    }
}
| |
package com.alexvasilkov.events;
import android.os.Looper;
import android.os.SystemClock;
import android.support.test.annotation.UiThreadTest;
import com.alexvasilkov.events.Events.Background;
import com.alexvasilkov.events.Events.Failure;
import com.alexvasilkov.events.Events.Result;
import com.alexvasilkov.events.Events.Status;
import com.alexvasilkov.events.Events.Subscribe;
import com.alexvasilkov.events.internal.EventsParams;
import com.alexvasilkov.events.utils.Counter;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.fail;
/**
 * Verifies on which threads the Events library delivers subscriber, status,
 * result and failure callbacks.
 *
 * Fixes: testWait() previously swallowed InterruptedException silently — the
 * interrupt status is now restored; raw {@code Class} parameters replaced with
 * {@code Class<?>}.
 */
public class ThreadingTest extends AbstractTest {

    // Pause inside the single-thread subscriber between counting start and end values.
    private static final long THREAD_SLEEP = 30L;
    // Maximum time to wait for asynchronous callbacks before failing the test.
    private static final long WAITING_TIME = 400L;
    // Monitor used to block the test thread until a callback signals completion.
    private static final Object NOTIFIER = new Object();

    // ----------------------------
    // Main thread callbacks
    // ----------------------------

    /** A subscriber without @Background must run on the main thread. */
    @Test
    public void testMainThreadSubscribe() {
        postAndWait(new Object() {
            @Subscribe(TASK_KEY)
            private void subscribe() {
                assertMainThread();
                testNotify();
            }
        });
    }

    /** Status callbacks must be delivered on the main thread. */
    @Test
    public void testMainThreadStatus() {
        postAndWait(new Object() {
            @Subscribe(TASK_KEY)
            private void subscribe() {}

            @Status(TASK_KEY)
            private void status(EventStatus status) {
                assertMainThread();
                testNotify();
            }
        });
    }

    /** Result callbacks must be delivered on the main thread. */
    @Test
    public void testMainThreadResult() {
        postAndWait(new Object() {
            @Subscribe(TASK_KEY)
            private Object subscribe() {
                return null;
            }

            @Result(TASK_KEY)
            private void result() {
                assertMainThread();
                testNotify();
            }
        });
    }

    /** Task-specific failure callbacks must be delivered on the main thread. */
    @Test
    public void testMainThreadFailure() {
        post(new Object() {
            @Subscribe(TASK_KEY)
            private void subscribe() {
                throw ERROR;
            }

            @Failure(TASK_KEY)
            private void failure() {
                assertMainThread();
                testNotify();
            }
        });
    }

    /** General (un-keyed) failure callbacks must be delivered on the main thread. */
    @Test
    public void testMainThreadGeneralFailure() {
        post(new Object() {
            @Subscribe(TASK_KEY)
            private void subscribe() {
                throw ERROR;
            }

            @Failure
            private void failureGeneral() {
                assertMainThread();
                testNotify();
            }
        });
    }

    // ----------------------------
    // Background thread execution
    // ----------------------------

    /** A @Background subscriber runs off the main thread; its result still lands on it. */
    @Test
    public void testBackgroundEvent() {
        postAndWait(BackgroundThreadTarget.class, new Object() {
            @Result(TASK_KEY)
            private void result() {
                assertMainThread();
                testNotify();
            }
        });
    }

    /** Full started / subscribe / result / finished sequence for a background event. */
    @Test
    public void testBackgroundFlow() {
        postAndWait(BackgroundThreadTarget.class, new Object() {
            @Status(TASK_KEY)
            private void status(EventStatus status) {
                assertMainThread();
                counter.count(status);
                if (status == EventStatus.FINISHED) {
                    testNotify();
                }
            }

            @Result(TASK_KEY)
            private void result() {
                assertMainThread();
                counter.count(Result.class);
            }
        });
        counter.check(EventStatus.STARTED, Subscribe.class, Result.class,
                EventStatus.FINISHED);
    }

    /** singleThread = true must serialize events: counts arrive strictly in post order. */
    @Test
    public void testSingleThread() {
        postAndWait(SingleThreadTarget.class, new Object() {
            @Result(TASK_KEY)
            private void result(int end) {
                if (end == 6) {
                    testNotify();
                }
            }
        }, new Runnable() {
            @Override
            public void run() {
                Events.create(TASK_KEY).param(counter, 1, 2).post();
                Events.create(TASK_KEY).param(counter, 3, 4).post();
                Events.create(TASK_KEY).param(counter, 5, 6).post();
            }
        });
        counter.check(1, 2, 3, 4, 5, 6);
    }

    // ----------------------------
    // Other tests
    // ----------------------------

    /** A slow status handler must cause the remaining main-thread callbacks to be postponed. */
    @Test
    @UiThreadTest
    public void mainThreadShouldNotBeBlockedForLongTime() {
        final long delay = 10L;
        EventsParams.setMaxTimeInUiThread(delay);

        post(new Object() {
            @Status(TASK_KEY)
            private void status(EventStatus status) {
                SystemClock.sleep(delay + 1L);
                counter.count(status);
            }

            @Subscribe(TASK_KEY)
            private Object subscribe(Event event) {
                fail("Subscriber should not be executed");
                return null;
            }

            @Failure(TASK_KEY)
            private void failure(Throwable throwable) throws Throwable {
                throw throwable; // Throwing out
            }
        });

        // Only "started" status should be executed, all other events should be delayed
        counter.check(EventStatus.STARTED);
    }

    // ----------------------------
    // Helper classes and methods
    // ----------------------------

    /**
     * Blocks on NOTIFIER until testNotify() is called, failing the test if the
     * full WAITING_TIME elapses.
     * NOTE(review): a spurious wakeup before the timeout would return early
     * without failing — acceptable for these tests, since assertions would
     * then fire elsewhere.
     */
    private static void testWait() {
        synchronized (NOTIFIER) {
            try {
                long started = System.currentTimeMillis();
                NOTIFIER.wait(WAITING_TIME);
                if (System.currentTimeMillis() - started >= WAITING_TIME) {
                    fail("Waiting too long for event to finish");
                }
            } catch (InterruptedException e) {
                // Fix: previously swallowed silently. Restore the interrupt
                // status so the caller / test runner can observe it.
                Thread.currentThread().interrupt();
            }
        }
    }

    /** Wakes up any thread blocked in testWait(). */
    private static void testNotify() {
        synchronized (NOTIFIER) {
            NOTIFIER.notifyAll();
        }
    }

    private static void assertMainThread() {
        assertEquals(Looper.getMainLooper().getThread(), Thread.currentThread());
    }

    private static void assertBackgroundThread() {
        assertNotEquals(Looper.getMainLooper().getThread(), Thread.currentThread());
    }

    /** Registers target, posts a counter event and waits for completion; always unregisters. */
    protected void postAndWait(Object target) {
        try {
            Events.register(target);
            Events.create(TASK_KEY).param(counter).post();
            testWait();
        } finally {
            Events.unregister(target);
        }
    }

    /** Variant with a separate class holding static subscribers. */
    protected void postAndWait(Class<?> staticTarget, Object callbackTarget) {
        postAndWait(staticTarget, callbackTarget, new Runnable() {
            @Override
            public void run() {
                Events.create(TASK_KEY).param(counter).post();
            }
        });
    }

    /** Registers both targets, runs the posting task and waits; always unregisters both. */
    protected void postAndWait(Class<?> staticTarget, Object callbackTarget, Runnable task) {
        try {
            Events.register(staticTarget);
            Events.register(callbackTarget);
            task.run();
            testWait();
        } finally {
            try {
                Events.unregister(callbackTarget);
            } finally {
                Events.unregister(staticTarget);
            }
        }
    }

    // Static subscriber executed on a background thread.
    private static class BackgroundThreadTarget {
        @Background
        @Subscribe(TASK_KEY)
        private static Object subscribe(Counter counter) {
            assertBackgroundThread();
            counter.count(Subscribe.class);
            return RESULT;
        }
    }

    // Static subscriber executed on a single dedicated background thread.
    private static class SingleThreadTarget {
        @Background(singleThread = true)
        @Subscribe(TASK_KEY)
        private static int subscribe(Counter counter, int start, int end) {
            assertBackgroundThread();
            counter.count(start);
            SystemClock.sleep(THREAD_SLEEP);
            counter.count(end);
            return end;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.record;
import org.apache.kafka.common.InvalidRecordException;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.errors.CorruptRecordException;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.utils.AbstractIterator;
import org.apache.kafka.common.utils.BufferSupplier;
import org.apache.kafka.common.utils.ByteBufferOutputStream;
import org.apache.kafka.common.utils.ByteUtils;
import org.apache.kafka.common.utils.CloseableIterator;
import org.apache.kafka.common.utils.Utils;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.ArrayDeque;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.Objects;
import static org.apache.kafka.common.record.Records.LOG_OVERHEAD;
import static org.apache.kafka.common.record.Records.OFFSET_OFFSET;
/**
 * This {@link RecordBatch} implementation is for magic versions 0 and 1. In addition to implementing
 * {@link RecordBatch}, it also implements {@link Record}, which exposes the duality of the old message
 * format in its handling of compressed messages. The wrapper record is considered the record batch in this
 * interface, while the inner records are considered the log records (though they both share the same schema).
 *
 * In general, this class should not be used directly. Instances of {@link Records} provides access to this
 * class indirectly through the {@link RecordBatch} interface.
 */
public abstract class AbstractLegacyRecordBatch extends AbstractRecordBatch implements Record {

    /**
     * The underlying "shallow" legacy record. For an uncompressed message this is the record
     * itself; for a compressed message it is the wrapper record whose value contains the
     * compressed inner records.
     */
    public abstract LegacyRecord outerRecord();

    @Override
    public long lastOffset() {
        return offset();
    }

    @Override
    public boolean isValid() {
        return outerRecord().isValid();
    }

    @Override
    public void ensureValid() {
        outerRecord().ensureValid();
    }

    @Override
    public int keySize() {
        return outerRecord().keySize();
    }

    @Override
    public boolean hasKey() {
        return outerRecord().hasKey();
    }

    @Override
    public ByteBuffer key() {
        return outerRecord().key();
    }

    @Override
    public int valueSize() {
        return outerRecord().valueSize();
    }

    @Override
    public boolean hasValue() {
        return !outerRecord().hasNullValue();
    }

    @Override
    public ByteBuffer value() {
        return outerRecord().value();
    }

    @Override
    public Header[] headers() {
        // Record headers only exist as of magic v2; legacy records never carry any.
        return Record.EMPTY_HEADERS;
    }

    @Override
    public boolean hasMagic(byte magic) {
        return magic == outerRecord().magic();
    }

    @Override
    public boolean hasTimestampType(TimestampType timestampType) {
        return outerRecord().timestampType() == timestampType;
    }

    @Override
    public Long checksumOrNull() {
        return checksum();
    }

    @Override
    public long checksum() {
        return outerRecord().checksum();
    }

    @Override
    public long maxTimestamp() {
        return timestamp();
    }

    @Override
    public long timestamp() {
        return outerRecord().timestamp();
    }

    @Override
    public TimestampType timestampType() {
        return outerRecord().timestampType();
    }

    @Override
    public long baseOffset() {
        // The base offset is the offset of the first (possibly inner) record; for a
        // compressed wrapper this forces decompression via the deep iterator.
        return iterator().next().offset();
    }

    @Override
    public byte magic() {
        return outerRecord().magic();
    }

    @Override
    public CompressionType compressionType() {
        return outerRecord().compressionType();
    }

    @Override
    public int sizeInBytes() {
        // Total on-disk size includes the offset+size log entry header.
        return outerRecord().sizeInBytes() + LOG_OVERHEAD;
    }

    @Override
    public Integer countOrNull() {
        // The legacy format does not store a record count in the header.
        return null;
    }

    @Override
    public String toString() {
        return "LegacyRecordBatch(offset=" + offset() + ", " + outerRecord() + ")";
    }

    @Override
    public void writeTo(ByteBuffer buffer) {
        // Write the offset+size header followed by the raw record bytes.
        writeHeader(buffer, offset(), outerRecord().sizeInBytes());
        buffer.put(outerRecord().buffer().duplicate());
    }

    // The remaining batch-level attributes below (producer id/epoch, sequences,
    // transactional and control flags, partition leader epoch) were introduced with
    // magic v2, so legacy batches report the sentinel "absent" values.

    @Override
    public long producerId() {
        return RecordBatch.NO_PRODUCER_ID;
    }

    @Override
    public short producerEpoch() {
        return RecordBatch.NO_PRODUCER_EPOCH;
    }

    @Override
    public boolean hasProducerId() {
        return false;
    }

    @Override
    public int sequence() {
        return RecordBatch.NO_SEQUENCE;
    }

    @Override
    public int baseSequence() {
        return RecordBatch.NO_SEQUENCE;
    }

    @Override
    public int lastSequence() {
        return RecordBatch.NO_SEQUENCE;
    }

    @Override
    public boolean isTransactional() {
        return false;
    }

    @Override
    public int partitionLeaderEpoch() {
        return RecordBatch.NO_PARTITION_LEADER_EPOCH;
    }

    @Override
    public boolean isControlBatch() {
        return false;
    }

    /**
     * Get an iterator for the nested entries contained within this batch. Note that
     * if the batch is not compressed, then this method will return an iterator over the
     * shallow record only (i.e. this object).
     * @return An iterator over the records contained within this batch
     */
    @Override
    public Iterator<Record> iterator() {
        return iterator(BufferSupplier.NO_CACHING);
    }

    CloseableIterator<Record> iterator(BufferSupplier bufferSupplier) {
        // Compressed wrappers must be decompressed to expose their inner records.
        if (isCompressed())
            return new DeepRecordsIterator(this, false, Integer.MAX_VALUE, bufferSupplier);

        // Uncompressed: a single-element iterator over this shallow record itself.
        return new CloseableIterator<Record>() {
            private boolean hasNext = true;

            @Override
            public void close() {}

            @Override
            public boolean hasNext() {
                return hasNext;
            }

            @Override
            public Record next() {
                if (!hasNext)
                    throw new NoSuchElementException();
                hasNext = false;
                return AbstractLegacyRecordBatch.this;
            }

            @Override
            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }

    @Override
    public CloseableIterator<Record> streamingIterator(BufferSupplier bufferSupplier) {
        // the older message format versions do not support streaming, so we return the normal iterator
        return iterator(bufferSupplier);
    }

    /** Write the legacy log entry header (8-byte offset followed by 4-byte size). */
    static void writeHeader(ByteBuffer buffer, long offset, int size) {
        buffer.putLong(offset);
        buffer.putInt(size);
    }

    /** Stream variant of {@link #writeHeader(ByteBuffer, long, int)}. */
    static void writeHeader(DataOutputStream out, long offset, int size) throws IOException {
        out.writeLong(offset);
        out.writeInt(size);
    }

    /**
     * Reads legacy record batches one at a time from a (typically decompressed) byte stream.
     */
    private static final class DataLogInputStream implements LogInputStream<AbstractLegacyRecordBatch> {
        private final InputStream stream;
        protected final int maxMessageSize;
        // Scratch buffer reused for each 12-byte offset+size header read.
        private final ByteBuffer offsetAndSizeBuffer;

        DataLogInputStream(InputStream stream, int maxMessageSize) {
            this.stream = stream;
            this.maxMessageSize = maxMessageSize;
            this.offsetAndSizeBuffer = ByteBuffer.allocate(Records.LOG_OVERHEAD);
        }

        /**
         * Read the next batch from the stream, or return null when the stream is exhausted
         * (including a partial trailing header or partial record).
         * @throws CorruptRecordException if the declared size is below the minimum record
         *         overhead or above {@code maxMessageSize}
         */
        public AbstractLegacyRecordBatch nextBatch() throws IOException {
            offsetAndSizeBuffer.clear();
            Utils.readFully(stream, offsetAndSizeBuffer);
            // A short read means end of stream (or truncated data) — treat as "no more batches".
            if (offsetAndSizeBuffer.hasRemaining())
                return null;

            long offset = offsetAndSizeBuffer.getLong(Records.OFFSET_OFFSET);
            int size = offsetAndSizeBuffer.getInt(Records.SIZE_OFFSET);
            if (size < LegacyRecord.RECORD_OVERHEAD_V0)
                throw new CorruptRecordException(String.format("Record size is less than the minimum record overhead (%d)", LegacyRecord.RECORD_OVERHEAD_V0));
            if (size > maxMessageSize)
                throw new CorruptRecordException(String.format("Record size exceeds the largest allowable message size (%d).", maxMessageSize));

            ByteBuffer batchBuffer = ByteBuffer.allocate(size);
            Utils.readFully(stream, batchBuffer);
            if (batchBuffer.hasRemaining())
                return null;
            batchBuffer.flip();

            return new BasicLegacyRecordBatch(offset, new LegacyRecord(batchBuffer));
        }
    }

    /**
     * Iterator over the inner records of a compressed legacy wrapper. Eagerly decompresses
     * the whole wrapper up front so that absolute offsets can be computed for v1 messages
     * (which store relative inner offsets).
     */
    private static class DeepRecordsIterator extends AbstractIterator<Record> implements CloseableIterator<Record> {
        private final ArrayDeque<AbstractLegacyRecordBatch> innerEntries;
        // Base to add to each inner (relative) offset for v1; -1 when offsets are already absolute (v0).
        private final long absoluteBaseOffset;
        private final byte wrapperMagic;

        private DeepRecordsIterator(AbstractLegacyRecordBatch wrapperEntry,
                                    boolean ensureMatchingMagic,
                                    int maxMessageSize,
                                    BufferSupplier bufferSupplier) {
            LegacyRecord wrapperRecord = wrapperEntry.outerRecord();
            this.wrapperMagic = wrapperRecord.magic();
            if (wrapperMagic != RecordBatch.MAGIC_VALUE_V0 && wrapperMagic != RecordBatch.MAGIC_VALUE_V1)
                throw new InvalidRecordException("Invalid wrapper magic found in legacy deep record iterator " + wrapperMagic);

            CompressionType compressionType = wrapperRecord.compressionType();
            // ZSTD was only introduced with magic v2, so it can never be valid here.
            if (compressionType == CompressionType.ZSTD)
                throw new InvalidRecordException("Invalid wrapper compressionType found in legacy deep record iterator " + wrapperMagic);

            ByteBuffer wrapperValue = wrapperRecord.value();
            if (wrapperValue == null)
                throw new InvalidRecordException("Found invalid compressed record set with null value (magic = " +
                        wrapperMagic + ")");

            InputStream stream = compressionType.wrapForInput(wrapperValue, wrapperRecord.magic(), bufferSupplier);
            LogInputStream<AbstractLegacyRecordBatch> logStream = new DataLogInputStream(stream, maxMessageSize);

            long lastOffsetFromWrapper = wrapperEntry.lastOffset();
            long timestampFromWrapper = wrapperRecord.timestamp();
            this.innerEntries = new ArrayDeque<>();

            // If relative offset is used, we need to decompress the entire message first to compute
            // the absolute offset. For simplicity and because it's a format that is on its way out, we
            // do the same for message format version 0
            try {
                while (true) {
                    AbstractLegacyRecordBatch innerEntry = logStream.nextBatch();
                    if (innerEntry == null)
                        break;

                    LegacyRecord record = innerEntry.outerRecord();
                    byte magic = record.magic();

                    if (ensureMatchingMagic && magic != wrapperMagic)
                        throw new InvalidRecordException("Compressed message magic " + magic +
                                " does not match wrapper magic " + wrapperMagic);

                    if (magic == RecordBatch.MAGIC_VALUE_V1) {
                        // Inner v1 records inherit the wrapper's timestamp and timestamp type.
                        LegacyRecord recordWithTimestamp = new LegacyRecord(
                                record.buffer(),
                                timestampFromWrapper,
                                wrapperRecord.timestampType());
                        innerEntry = new BasicLegacyRecordBatch(innerEntry.lastOffset(), recordWithTimestamp);
                    }

                    innerEntries.addLast(innerEntry);
                }

                if (innerEntries.isEmpty())
                    throw new InvalidRecordException("Found invalid compressed record set with no inner records");

                if (wrapperMagic == RecordBatch.MAGIC_VALUE_V1) {
                    if (lastOffsetFromWrapper == 0) {
                        // The outer offset may be 0 if this is produce data from certain versions of librdkafka.
                        this.absoluteBaseOffset = 0;
                    } else {
                        long lastInnerOffset = innerEntries.getLast().offset();
                        if (lastOffsetFromWrapper < lastInnerOffset)
                            throw new InvalidRecordException("Found invalid wrapper offset in compressed v1 message set, " +
                                    "wrapper offset '" + lastOffsetFromWrapper + "' is less than the last inner message " +
                                    "offset '" + lastInnerOffset + "' and it is not zero.");
                        // The wrapper stores the last absolute offset; the inner records store
                        // relative offsets, so the difference gives us the absolute base.
                        this.absoluteBaseOffset = lastOffsetFromWrapper - lastInnerOffset;
                    }
                } else {
                    // v0 inner records already carry absolute offsets.
                    this.absoluteBaseOffset = -1;
                }
            } catch (IOException e) {
                throw new KafkaException(e);
            } finally {
                Utils.closeQuietly(stream, "records iterator stream");
            }
        }

        @Override
        protected Record makeNext() {
            if (innerEntries.isEmpty())
                return allDone();

            AbstractLegacyRecordBatch entry = innerEntries.remove();

            // Convert offset to absolute offset if needed.
            if (wrapperMagic == RecordBatch.MAGIC_VALUE_V1) {
                long absoluteOffset = absoluteBaseOffset + entry.offset();
                entry = new BasicLegacyRecordBatch(absoluteOffset, entry.outerRecord());
            }

            if (entry.isCompressed())
                throw new InvalidRecordException("Inner messages must not be compressed");

            return entry;
        }

        @Override
        public void close() {}
    }

    /** Simple immutable (offset, record) pair used for decompressed inner entries. */
    private static class BasicLegacyRecordBatch extends AbstractLegacyRecordBatch {
        private final LegacyRecord record;
        private final long offset;

        private BasicLegacyRecordBatch(long offset, LegacyRecord record) {
            this.offset = offset;
            this.record = record;
        }

        @Override
        public long offset() {
            return offset;
        }

        @Override
        public LegacyRecord outerRecord() {
            return record;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o)
                return true;
            if (o == null || getClass() != o.getClass())
                return false;

            BasicLegacyRecordBatch that = (BasicLegacyRecordBatch) o;

            return offset == that.offset &&
                    Objects.equals(record, that.record);
        }

        @Override
        public int hashCode() {
            int result = record != null ? record.hashCode() : 0;
            result = 31 * result + Long.hashCode(offset);
            return result;
        }
    }

    /**
     * Mutable batch backed directly by a {@link ByteBuffer} containing the log entry header
     * followed by the record bytes. Mutations write straight through to the buffer.
     */
    static class ByteBufferLegacyRecordBatch extends AbstractLegacyRecordBatch implements MutableRecordBatch {
        private final ByteBuffer buffer;
        private final LegacyRecord record;

        ByteBufferLegacyRecordBatch(ByteBuffer buffer) {
            this.buffer = buffer;
            // Slice off the 12-byte offset+size header so the record view starts at the
            // record bytes, then restore the buffer position.
            buffer.position(LOG_OVERHEAD);
            this.record = new LegacyRecord(buffer.slice());
            buffer.position(OFFSET_OFFSET);
        }

        @Override
        public long offset() {
            return buffer.getLong(OFFSET_OFFSET);
        }

        @Override
        public LegacyRecord outerRecord() {
            return record;
        }

        @Override
        public void setLastOffset(long offset) {
            buffer.putLong(OFFSET_OFFSET, offset);
        }

        @Override
        public void setMaxTimestamp(TimestampType timestampType, long timestamp) {
            if (record.magic() == RecordBatch.MAGIC_VALUE_V0)
                throw new UnsupportedOperationException("Cannot set timestamp for a record with magic = 0");

            long currentTimestamp = record.timestamp();
            // We don't need to recompute crc if the timestamp is not updated.
            if (record.timestampType() == timestampType && currentTimestamp == timestamp)
                return;

            setTimestampAndUpdateCrc(timestampType, timestamp);
        }

        @Override
        public void setPartitionLeaderEpoch(int epoch) {
            throw new UnsupportedOperationException("Magic versions prior to 2 do not support partition leader epoch");
        }

        private void setTimestampAndUpdateCrc(TimestampType timestampType, long timestamp) {
            // The timestamp type lives in the attributes byte, so rewrite attributes,
            // then the timestamp, then recompute and overwrite the CRC to match.
            byte attributes = LegacyRecord.computeAttributes(magic(), compressionType(), timestampType);
            buffer.put(LOG_OVERHEAD + LegacyRecord.ATTRIBUTES_OFFSET, attributes);
            buffer.putLong(LOG_OVERHEAD + LegacyRecord.TIMESTAMP_OFFSET, timestamp);
            long crc = record.computeChecksum();
            ByteUtils.writeUnsignedInt(buffer, LOG_OVERHEAD + LegacyRecord.CRC_OFFSET, crc);
        }

        /**
         * LegacyRecordBatch does not implement this iterator and would hence fallback to the normal iterator.
         *
         * @return An iterator over the records contained within this batch
         */
        @Override
        public CloseableIterator<Record> skipKeyValueIterator(BufferSupplier bufferSupplier) {
            return CloseableIterator.wrap(iterator(bufferSupplier));
        }

        @Override
        public void writeTo(ByteBufferOutputStream outputStream) {
            outputStream.write(buffer.duplicate());
        }

        @Override
        public boolean equals(Object o) {
            if (this == o)
                return true;
            if (o == null || getClass() != o.getClass())
                return false;

            ByteBufferLegacyRecordBatch that = (ByteBufferLegacyRecordBatch) o;

            return Objects.equals(buffer, that.buffer);
        }

        @Override
        public int hashCode() {
            return buffer != null ? buffer.hashCode() : 0;
        }
    }

    /**
     * File-backed legacy batch that lazily loads the full batch from a {@link FileRecords}
     * position; batch-level v2 attributes report the legacy sentinel values.
     */
    static class LegacyFileChannelRecordBatch extends FileLogInputStream.FileChannelRecordBatch {

        LegacyFileChannelRecordBatch(long offset,
                                     byte magic,
                                     FileRecords fileRecords,
                                     int position,
                                     int batchSize) {
            super(offset, magic, fileRecords, position, batchSize);
        }

        @Override
        protected RecordBatch toMemoryRecordBatch(ByteBuffer buffer) {
            return new ByteBufferLegacyRecordBatch(buffer);
        }

        @Override
        public long baseOffset() {
            // Requires reading (and possibly decompressing) the batch from disk.
            return loadFullBatch().baseOffset();
        }

        @Override
        public long lastOffset() {
            return offset;
        }

        @Override
        public long producerId() {
            return RecordBatch.NO_PRODUCER_ID;
        }

        @Override
        public short producerEpoch() {
            return RecordBatch.NO_PRODUCER_EPOCH;
        }

        @Override
        public int baseSequence() {
            return RecordBatch.NO_SEQUENCE;
        }

        @Override
        public int lastSequence() {
            return RecordBatch.NO_SEQUENCE;
        }

        @Override
        public Integer countOrNull() {
            return null;
        }

        @Override
        public boolean isTransactional() {
            return false;
        }

        @Override
        public boolean isControlBatch() {
            return false;
        }

        @Override
        public int partitionLeaderEpoch() {
            return RecordBatch.NO_PARTITION_LEADER_EPOCH;
        }

        @Override
        protected int headerSize() {
            return LOG_OVERHEAD + LegacyRecord.headerSize(magic);
        }
    }
}
| |
/*
* Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package jdk.nashorn.internal.runtime;
import static jdk.nashorn.internal.runtime.ECMAErrors.typeError;
import static jdk.nashorn.internal.runtime.ScriptRuntime.UNDEFINED;
import static jdk.nashorn.internal.lookup.Lookup.MH;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.util.Map;
import java.util.StringTokenizer;
/**
 * Global functions supported only in scripting mode.
 */
public final class ScriptingFunctions {

    /** Handle to implementation of {@link ScriptingFunctions#readLine} - Nashorn extension */
    public static final MethodHandle READLINE = findOwnMH("readLine", Object.class, Object.class, Object.class);

    /** Handle to implementation of {@link ScriptingFunctions#readFully} - Nashorn extension */
    public static final MethodHandle READFULLY = findOwnMH("readFully", Object.class, Object.class, Object.class);

    /** Handle to implementation of {@link ScriptingFunctions#exec} - Nashorn extension */
    public static final MethodHandle EXEC = findOwnMH("exec", Object.class, Object.class, Object.class, Object.class);

    /** EXEC name - special property used by $EXEC API. */
    public static final String EXEC_NAME = "$EXEC";

    /** OUT name - special property used by $EXEC API. */
    public static final String OUT_NAME = "$OUT";

    /** ERR name - special property used by $EXEC API. */
    public static final String ERR_NAME = "$ERR";

    /** EXIT name - special property used by $EXEC API. */
    public static final String EXIT_NAME = "$EXIT";

    /** Names of special properties used by $ENV API. */
    public static final String ENV_NAME = "$ENV";

    private static final String PWD_NAME = "PWD";

    /** Utility class - not instantiable. */
    private ScriptingFunctions() {
    }

    /**
     * Nashorn extension: global.readLine (scripting-mode-only)
     * Read one line of input from the standard input.
     *
     * @param self self reference
     * @param prompt String used as input prompt
     *
     * @return line that was read
     *
     * @throws IOException if an exception occurs
     */
    public static Object readLine(final Object self, final Object prompt) throws IOException {
        // Only print a prompt when one was actually supplied.
        if (prompt != UNDEFINED) {
            System.out.print(JSType.toString(prompt));
        }
        final BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
        return in.readLine();
    }

    /**
     * Nashorn extension: Read the entire contents of a text file and return as String.
     *
     * @param self self reference
     * @param file The input file whose content is read.
     *
     * @return String content of the input file.
     *
     * @throws IOException if an exception occurs
     */
    public static Object readFully(final Object self, final Object file) throws IOException {
        // Accept either a File object or a String path; anything else is an error.
        final File target;
        if (file instanceof File) {
            target = (File)file;
        } else if (file instanceof String) {
            target = new java.io.File((String)file);
        } else {
            target = null;
        }

        if (target == null || !target.isFile()) {
            throw typeError("not.a.file", ScriptRuntime.safeToString(file));
        }

        return new String(Source.readFully(target));
    }

    /**
     * Nashorn extension: exec a string in a separate process.
     *
     * @param self self reference
     * @param string string to execute
     * @param input input
     *
     * @return output string from the request
     * @throws IOException if any stream access fails
     * @throws InterruptedException if execution is interrupted
     */
    public static Object exec(final Object self, final Object string, final Object input) throws IOException, InterruptedException {
        // The current global supplies $ENV and receives $OUT/$ERR/$EXIT.
        final ScriptObject global = Context.getGlobal();

        // Split the command string into whitespace-separated arguments.
        final StringTokenizer tok = new StringTokenizer(JSType.toString(string));
        final String[] command = new String[tok.countTokens()];
        int arg = 0;
        while (tok.hasMoreTokens()) {
            command[arg++] = tok.nextToken();
        }

        final ProcessBuilder builder = new ProcessBuilder(command);

        // If $ENV is a script object, take the working directory from its PWD
        // entry and replace the child's entire environment with its contents.
        final Object envValue = global.get(ENV_NAME);
        if (envValue instanceof ScriptObject) {
            final ScriptObject envObj = (ScriptObject)envValue;

            final Object pwd = envObj.get(PWD_NAME);
            if (pwd != UNDEFINED) {
                builder.directory(new File(JSType.toString(pwd)));
            }

            final Map<String, String> environment = builder.environment();
            environment.clear();
            for (final Map.Entry<Object, Object> entry : envObj.entrySet()) {
                environment.put(JSType.toString(entry.getKey()), JSType.toString(entry.getValue()));
            }
        }

        final Process process = builder.start();

        // Slot 0 records a stdout-reader failure, slot 1 a stderr-reader failure.
        final IOException[] pendingErrors = new IOException[2];

        // Gather the child's standard output on a dedicated thread.
        final StringBuilder stdout = new StringBuilder();
        final Thread stdoutGatherer = new Thread(new Runnable() {
            @Override
            public void run() {
                final char[] chunk = new char[1024];
                try (final InputStreamReader in = new InputStreamReader(process.getInputStream())) {
                    for (int n; (n = in.read(chunk, 0, chunk.length)) != -1; ) {
                        stdout.append(chunk, 0, n);
                    }
                } catch (final IOException ex) {
                    pendingErrors[0] = ex;
                }
            }
        }, "$EXEC output");

        // Gather the child's standard error on a dedicated thread.
        final StringBuilder stderr = new StringBuilder();
        final Thread stderrGatherer = new Thread(new Runnable() {
            @Override
            public void run() {
                final char[] chunk = new char[1024];
                try (final InputStreamReader in = new InputStreamReader(process.getErrorStream())) {
                    for (int n; (n = in.read(chunk, 0, chunk.length)) != -1; ) {
                        stderr.append(chunk, 0, n);
                    }
                } catch (final IOException ex) {
                    pendingErrors[1] = ex;
                }
            }
        }, "$EXEC error");

        stdoutGatherer.start();
        stderrGatherer.start();

        // Feed the child's standard input, if any was supplied.
        try (final OutputStreamWriter toChild = new OutputStreamWriter(process.getOutputStream())) {
            if (input != UNDEFINED) {
                final String in = JSType.toString(input);
                toChild.write(in, 0, in.length());
            }
        } catch (final IOException ex) {
            // Process was not expecting input. May be normal state of affairs.
        }

        // Wait for completion, then for both gatherer threads (join also gives us
        // visibility of their buffers and error slots).
        final int exit = process.waitFor();
        stdoutGatherer.join();
        stderrGatherer.join();

        final String out = stdout.toString();
        final String err = stderr.toString();

        // Publish the secondary results on the global.
        global.set(OUT_NAME, out, false);
        global.set(ERR_NAME, err, false);
        global.set(EXIT_NAME, exit, false);

        // Propagate the first recorded reader failure, if any (stdout first).
        for (final IOException ex : pendingErrors) {
            if (ex != null) {
                throw ex;
            }
        }

        // Return the result from stdout.
        return out;
    }

    /** Look up a static method of this class as a {@link MethodHandle}. */
    private static MethodHandle findOwnMH(final String name, final Class<?> rtype, final Class<?>... types) {
        return MH.findStatic(MethodHandles.lookup(), ScriptingFunctions.class, name, MH.type(rtype, types));
    }
}
| |
package net.community.chest.net.proto.text;
import java.util.Collection;
import java.util.Map;
import net.community.chest.ParsableString;
import net.community.chest.util.map.entries.StringPairEntry;
/**
 * <P>Copyright 2007 as per GPLv2</P>
 *
 * <P>Used to analyze a server's identity</P>
 * @author Lyor G.
 * @since Oct 25, 2007 8:46:21 AM
 */
public abstract class NetServerIdentityAnalyzer {
    protected NetServerIdentityAnalyzer ()
    {
        super();
    }
    /**
     * Special characters (besides "whitespace") that are considered pattern argument delimiters
     * @see #inetSkipWelcomeArgument(CharSequence cs, int startPos, int endPos)
     */
    protected static final String InetNonIgnoredWelcomePatternChars="()[]{};";
    /**
     * Skips an "argument" - defined as any non-empty (and non-ignorable) sequence of characters
     * @param cs character sequence to check
     * @param startPos start position from which to start checking argument (inclusive) - Note:
     * this position is assumed to be non-empty itself...
     * @param endPos end position at which to stop checking (exclusive)
     * @return index of next position in sequence (&lt;0 if error)
     * @see #InetNonIgnoredWelcomePatternChars
     */
    protected static final int inetSkipWelcomeArgument (final CharSequence cs, final int startPos, final int endPos)
    {
        for (int index=startPos; index < endPos; index++)
        {
            final char c=cs.charAt(index);
            // the argument ends at the first whitespace or delimiter character
            if (ParsableString.isEmptyChar(c) || ((-1) != InetNonIgnoredWelcomePatternChars.indexOf(c)))
                return index;
        }
        // argument extends to the end of the examined range
        return endPos;
    }
    /**
     * The pattern modifier indicator itself
     */
    public static final char INET_WPAT_MODIFIER='%';
    /**
     * Server type name
     */
    public static final char INET_WPAT_TYPE='T';
    /**
     * Server version string
     */
    public static final char INET_WPAT_VERSION='V';
    /**
     * Ignore string indicator - Note: if this is the last modifier, then rest
     * of the welcome line is ignored. Otherwise, this is a placeholder for a
     * single string argument.
     */
    public static final char INET_WPAT_IGNORE='I';
    /**
     * Match an RFC822 date/time
     */
    public static final char INET_WPAT_RFC822DATE='D';
    /**
     * Matches an <B>indefinite</B> number of characters. Note: must be followed
     * by a delimiter to indicate when matching is done: e.g. "%*(" -&gt; skip till ')' found
     */
    public static final char INET_WPAT_SKIPTODELIM='*';
    /**
     * Extracts a server identity info component
     * @param cs character sequence from which to extract the component
     * @param startPos start position from which to look for component data end (inclusive) - Note:
     * assumed to be non-empty
     * @param endPos end position at which to stop checking (exclusive)
     * @param ptChar information component type character
     * @param id The {@link StringPairEntry} information object to be updated
     * @return index of next position in sequence (&lt;0 if error)
     * @see #inetSkipWelcomeArgument(CharSequence cs, int startPos, int endPos)
     * @see #INET_WPAT_TYPE
     * @see #INET_WPAT_VERSION
     */
    protected static final int extractServerIdentityInfo (final CharSequence cs, final int startPos, final int endPos, final char ptChar, final StringPairEntry id)
    {
        final int valEnd=inetSkipWelcomeArgument(cs, startPos, endPos);
        if (valEnd <= startPos) // we expect non-empty arguments
            return (-11);

        // TYPE goes into the key, VERSION (or anything else) into the value
        final CharSequence ptType=cs.subSequence(startPos, valEnd);
        if (INET_WPAT_TYPE == ptChar)
            id.setKey(ptType.toString());
        else
            id.setValue(ptType.toString());
        return valEnd;
    }
    /**
     * Matches the welcome line with the given pattern and initializes the information object (if successful)
     * @param wl welcome line to be matched
     * @param startPos start position in welcome line where matching should start
     * @param wlLen number of characters available for parsing
     * @param pattern pattern to match against
     * @return If successful - server identity object as a {@link java.util.Map.Entry} whose
     * key=type and value=version (null otherwise)
     */
    public static final Map.Entry<String,String> matchServerIdentity (final CharSequence wl,
                                                                      final int startPos,
                                                                      final int wlLen,
                                                                      final CharSequence pattern)
    {
        final int ptLen=(null == pattern) ? 0 : pattern.length(),
                  maxPos=startPos + wlLen;
        if ((null == wl) || (startPos < 0) || (wlLen <= 0) || (ptLen <= 0) || (maxPos > wl.length()))
            return null;

        StringPairEntry id=null;
        for (int wlPos=startPos, ptPos=0; (wlPos < maxPos) && (ptPos < ptLen); ptPos++)
        {
            /* ignore any spaces in the pattern or the welcome line */
            {
                final int newWlPos=ParsableString.findNonEmptyDataStart(wl, wlPos, maxPos);
                if (newWlPos < wlPos)
                    wlPos = maxPos;
                else
                    wlPos = newWlPos;
            }
            {
                final int newPtPos=ParsableString.findNonEmptyDataStart(pattern, ptPos, ptLen);
                if (newPtPos < ptPos)
                    ptPos = ptLen;
                else
                    ptPos = newPtPos;
            }
            /* if either position exhausted then no need to check any further */
            if ((wlPos >= maxPos) || (ptPos >= ptLen))
                break;

            // check if pattern modifier
            char ptChar=pattern.charAt(ptPos);
            if (INET_WPAT_MODIFIER != ptChar)
            {
                // make sure same character in welcome line as in pattern
                if (wl.charAt(wlPos) != ptChar)
                    return null;
                wlPos++;
                continue;
            }

            ptPos++; // skip pattern modifier
            if (ptPos >= ptLen)
                break;
            ptChar = pattern.charAt(ptPos);
            switch(ptChar)
            {
                case INET_WPAT_SKIPTODELIM:
                    ptPos++;
                    /* BUG FIX: the bounds check must precede the charAt call -
                     * previously pattern.charAt(ptPos) was evaluated first, which
                     * threw StringIndexOutOfBoundsException when "%*" ended the
                     * pattern instead of returning null as intended.
                     */
                    if (ptPos >= ptLen)
                        return null;
                    ptChar = pattern.charAt(ptPos);
                    if (' ' == ptChar)
                        return null;
                    if ((wlPos=ParsableString.indexOf(wl, ptChar, wlPos, maxPos)) < 0)
                        return null;
                    /* fall through to normal comparison ... */
                case INET_WPAT_MODIFIER : /* handles '%%' as well */
                    if (ptChar != wl.charAt(wlPos))
                        return null;
                    wlPos++;
                    break;
                case INET_WPAT_RFC822DATE:
                    {
                        // find end of RFC822 date/time by searching for the GMT offset
                        int dtEnd=ParsableString.indexOf(wl, '+', wlPos, maxPos);
                        if ((dtEnd < wlPos) || (dtEnd >= maxPos))
                        {
                            dtEnd = ParsableString.indexOf(wl, '-', wlPos, maxPos);
                            if ((dtEnd < wlPos) || (dtEnd >= maxPos))
                                return null;
                        }
                        /* NOTE !!! we should actually make sure that there is a time value encoded...
                         * for now, we limit ourselves to checking that it is a 4-digit GMT offset
                         */
                        for (dtEnd++, wlPos=dtEnd; wlPos < maxPos; wlPos++)
                        {
                            final char ch=wl.charAt(wlPos);
                            if ((ch < '0') || (ch > '9'))
                                break;
                        }
                        if ((wlPos - dtEnd) != 4) // make sure EXACTLY 4 digits have been read
                            return null;
                        // skip to check if there is a timezone comment
                        dtEnd = ParsableString.findNonEmptyDataStart(wl, wlPos, maxPos);
                        if ((dtEnd >= wlPos) && (dtEnd < maxPos))
                            wlPos = dtEnd;
                        else
                            wlPos = maxPos;
                        // if there is a timezone comment then skip it
                        if ((wlPos < maxPos) && ('(' == wl.charAt(wlPos)))
                        {
                            wlPos = ParsableString.indexOf(wl, ')', wlPos, maxPos);
                            if ((wlPos <= 0) || (wlPos >= maxPos))
                                return null;
                            wlPos++;
                        }
                    }
                    break;
                case INET_WPAT_IGNORE :
                    /* check if ignore rest of welcome line */
                    if ((ptPos + 1) >= ptLen)
                        break;
                    /* ignore current alpha string */
                    if ((wlPos=inetSkipWelcomeArgument(wl, wlPos, maxPos)) < 0)
                        return null;
                    break;
                case INET_WPAT_TYPE :
                case INET_WPAT_VERSION :
                    /* check if have type/version override (e.g. "%T=Foo") */
                    if (((ptPos+1) < ptLen) && ('=' == pattern.charAt(ptPos+1)))
                    {
                        ptPos += 2;
                        if ((ptPos >= ptLen) || (' ' == pattern.charAt(ptPos)))
                            return null;
                        if (null == id)
                            id = new StringPairEntry();
                        // the component value comes from the pattern itself
                        if ((ptPos=extractServerIdentityInfo(pattern, ptPos, ptLen, ptChar, id)) < 0)
                            return null;
                        ptPos--; // compensate for automatic increment by loop
                    }
                    else
                    {
                        if (null == id)
                            id = new StringPairEntry();
                        // the component value comes from the welcome line
                        if ((wlPos=extractServerIdentityInfo(wl, wlPos, maxPos, ptChar, id)) < 0)
                            return null;
                    }
                    break;
                default : // this point is reached for unknown modifier
                    return null;
            } // end of handling modifier
        } // end of scanning

        // require at least one of type/version to have been extracted
        if ((null == id) || id.isEmpty())
            return null;
        return id;
    }
    /**
     * Analyzes the welcome line and determines the server type and version (if possible)
     * @param wl original welcome line received from server
     * @param startPos start position in welcome line to start matching
     * @param len number of characters available for analysis
     * @param patterns analysis patterns specifications
     * @return If successful - server identity object as a {@link java.util.Map.Entry} whose
     * key=type and value=version (null otherwise)
     */
    public static final Map.Entry<String,String> getServerIdentity (final CharSequence wl,
                                                                    final int startPos,
                                                                    final int len,
                                                                    final Collection<String> patterns)
    {
        if ((null == patterns) || (patterns.size() <= 0))
            return null;

        // first pattern that matches wins
        for (final String p : patterns)
        {
            final Map.Entry<String,String> id=matchServerIdentity(wl, startPos, len, p);
            if (id != null)
                return id;
        }
        // this point is reached if no match found for the supplied patterns
        return null;
    }
    /**
     * @return A {@link Collection} of known patterns against which to check the welcome line (may be null/empty)
     */
    public abstract Collection<String> getKnownPatterns ();
    /**
     * Checks where the data that can be matched against a pattern begins, and returns an index AFTER this prefix.
     * @param welcomeLine line to be checked
     * @return index of next character in sequence to be matched for the pattern (&lt;0 if error)
     */
    public abstract int getWelcomePatternMatchStart (final CharSequence welcomeLine);
    /**
     * Analyzes the welcome line and determines the server type and version (if possible)
     * @param wl original welcome line received from server
     * @param startPos start position in welcome line to start matching
     * @param len number of characters available for analysis
     * @return If successful - server identity object as a {@link java.util.Map.Entry} whose
     * key=type and value=version (null otherwise). <B>Note:</B> either type or value
     * may be null/empty but not both
     */
    public Map.Entry<String,String> getServerIdentity (final CharSequence wl, final int startPos, final int len)
    {
        return getServerIdentity(wl, startPos, len, getKnownPatterns());
    }
    /**
     * Analyzes the welcome line and determines the server type and version (if possible)
     * @param wl original welcome line received from server
     * @param patterns A {@link Collection} of patterns to check
     * @return If successful - server identity object as a {@link java.util.Map.Entry} whose
     * key=type and value=version (null otherwise). <B>Note:</B> either type or value
     * may be null/empty but not both
     */
    public Map.Entry<String,String> getServerIdentity (final CharSequence wl, final Collection<String> patterns)
    {
        final int wLen=(null == wl) ? 0 : wl.length(),
                  startPos=(wLen <= 0) ? (-1) : getWelcomePatternMatchStart(wl);
        if (startPos < 0)
            return null;
        return getServerIdentity(wl, startPos, (wLen - startPos), patterns);
    }
    /**
     * Analyzes the welcome line and determines the server type and version (if possible)
     * @param wl original welcome line received from server
     * @return If successful - server identity object as a {@link java.util.Map.Entry} whose
     * key=type and value=version (null otherwise). <B>Note:</B> either type or value
     * may be null/empty but not both
     */
    public Map.Entry<String,String> getServerIdentity (final CharSequence wl)
    {
        return getServerIdentity(wl, getKnownPatterns());
    }
}
| |
package com.loomcom.symon;
import com.loomcom.symon.devices.Acia;
import com.loomcom.symon.devices.Acia6551;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.*;
/**
 * Tests for the 6551 ACIA device model: IRQ generation, the interrupt and
 * buffer-status flags in the status register, overrun behaviour, and the
 * difference between emulator reads (which update status) and non-destructive
 * memory-window reads (which must not).
 */
public class AciaTest {

    /** Base address at which the ACIA under test is mapped. */
    private static final int ACIA_BASE = 0x0000;

    /** Creates an ACIA at {@link #ACIA_BASE} attached to the supplied (mock) bus. */
    private Acia newAciaWithBus(Bus bus) throws Exception {
        Acia acia = new Acia6551(ACIA_BASE);
        acia.setBus(bus);
        return acia;
    }

    @Test
    public void shouldTriggerInterruptOnRxFullIfRxIrqEnabled() throws Exception {
        Bus mockBus = mock(Bus.class);
        Acia acia = newAciaWithBus(mockBus);
        // Disable TX IRQ, Enable RX IRQ
        acia.write(2, 0x00);
        acia.rxWrite('a');
        verify(mockBus, atLeastOnce()).assertIrq();
    }

    @Test
    public void shouldNotTriggerInterruptOnRxFullIfRxIrqNotEnabled() throws Exception {
        Bus mockBus = mock(Bus.class);
        Acia acia = newAciaWithBus(mockBus);
        // Disable TX IRQ, Disable RX IRQ
        acia.write(2, 0x02);
        acia.rxWrite('a');
        verify(mockBus, never()).assertIrq();
    }

    @Test
    public void shouldTriggerInterruptOnTxEmptyIfTxIrqEnabled() throws Exception {
        Bus mockBus = mock(Bus.class);
        Acia acia = newAciaWithBus(mockBus);
        // Enable TX IRQ, Disable RX IRQ
        acia.write(2, 0x06);
        // Write data
        acia.write(0, 'a');
        verify(mockBus, never()).assertIrq();
        // Transmission should cause IRQ
        acia.txRead(true);
        verify(mockBus, atLeastOnce()).assertIrq();
    }

    @Test
    public void shouldNotTriggerInterruptOnTxEmptyIfTxIrqNotEnabled() throws Exception {
        Bus mockBus = mock(Bus.class);
        Acia acia = newAciaWithBus(mockBus);
        // Disable TX IRQ, Disable RX IRQ
        acia.write(2, 0x02);
        // Write data
        acia.write(0, 'a');
        // Transmission should not cause IRQ
        acia.txRead(true);
        verify(mockBus, never()).assertIrq();
    }

    @Test
    public void shouldTriggerInterruptFlagOnRxFullIfRxIrqEnabled() throws Exception {
        Bus mockBus = mock(Bus.class);
        Acia acia = newAciaWithBus(mockBus);
        // Disable TX IRQ, Enable RX IRQ
        acia.write(2, 0x00);
        acia.rxWrite('a');
        // Receive should cause IRQ flag (bit 7) to be set
        assertEquals(0x80, acia.read(0x0001, true) & 0x80);
    }

    @Test
    public void shouldNotTriggerInterruptFlagOnRxFullIfRxIrqNotEnabled() throws Exception {
        Bus mockBus = mock(Bus.class);
        Acia acia = newAciaWithBus(mockBus);
        // Disable TX IRQ, Disable RX IRQ
        acia.write(2, 0x02);
        acia.rxWrite('a');
        // Receive should not cause IRQ flag to be set
        assertEquals(0x00, acia.read(0x0001, true) & 0x80);
    }

    @Test
    public void shouldTriggerInterruptFlagOnTxEmptyIfTxIrqEnabled() throws Exception {
        Bus mockBus = mock(Bus.class);
        Acia acia = newAciaWithBus(mockBus);
        // Enable TX IRQ, Disable RX IRQ
        acia.write(2, 0x06);
        // Write data
        acia.write(0, 'a');
        verify(mockBus, never()).assertIrq();
        // Transmission should cause IRQ flag to be set
        acia.txRead(true);
        assertEquals(0x80, acia.read(0x0001, true) & 0x80);
    }

    @Test
    public void shouldNotTriggerInterruptFlagOnTxEmptyIfTxIrqNotEnabled() throws Exception {
        Bus mockBus = mock(Bus.class);
        Acia acia = newAciaWithBus(mockBus);
        // Disable TX IRQ, Disable RX IRQ
        acia.write(2, 0x02);
        // Write data
        acia.write(0, 'a');
        // Transmission should not cause IRQ flag to be set
        acia.txRead(true);
        assertEquals(0x00, acia.read(0x0001, true) & 0x80);
    }

    @Test
    public void newAciaShouldHaveTxEmptyStatus() throws Exception {
        Acia acia = new Acia6551(ACIA_BASE);
        // TX-empty (bit 4) is the only status bit set on a fresh device
        assertEquals(0x10, acia.read(0x0001, true));
    }

    @Test
    public void aciaShouldHaveTxEmptyStatusOffIfTxHasData() throws Exception {
        Acia acia = new Acia6551(ACIA_BASE);
        acia.txWrite('a');
        assertEquals(0x00, acia.read(0x0001, true));
    }

    @Test
    public void aciaShouldHaveRxFullStatusOffIfRxHasData() throws Exception {
        Acia acia = new Acia6551(ACIA_BASE);
        acia.rxWrite('a');
        // RX-full (bit 3) and TX-empty (bit 4) both set
        assertEquals(0x18, acia.read(0x0001, true));
    }

    @Test
    public void aciaShouldHaveTxEmptyAndRxFullStatusOffIfRxAndTxHaveData()
            throws Exception {
        Acia acia = new Acia6551(ACIA_BASE);
        acia.rxWrite('a');
        acia.txWrite('b');
        assertEquals(0x08, acia.read(0x0001, true));
    }

    @Test
    public void aciaShouldOverrunAndReadShouldReset()
            throws Exception {
        Acia acia = new Acia6551(ACIA_BASE);
        // overrun ACIA (second byte arrives before the first is read)
        acia.rxWrite('a');
        acia.rxWrite('b');
        assertEquals(0x04, acia.read(0x0001, true) & 0x04);
        // emulator read should reset the overrun flag
        acia.rxRead(true);
        assertEquals(0x00, acia.read(0x0001, true) & 0x04);
    }

    @Test
    public void aciaShouldOverrunAndMemoryWindowReadShouldNotReset()
            throws Exception {
        Acia acia = new Acia6551(ACIA_BASE);
        // overrun ACIA
        acia.rxWrite('a');
        acia.rxWrite('b');
        assertEquals(0x04, acia.read(0x0001, true) & 0x04);
        // memory window read should not reset the overrun flag
        acia.rxRead(false);
        assertEquals(0x04, acia.read(0x0001, true) & 0x04);
    }

    @Test
    public void readingBuffersShouldResetStatus()
            throws Exception {
        Acia acia = new Acia6551(ACIA_BASE);
        acia.rxWrite('a');
        acia.txWrite('b');
        assertEquals(0x08, acia.read(0x0001, true));
        assertEquals('a', acia.rxRead(true));
        assertEquals('b', acia.txRead(true));
        assertEquals(0x10, acia.read(0x0001, true));
    }

    @Test
    public void memoryWindowReadsShouldNotResetStatus()
            throws Exception {
        Acia acia = new Acia6551(ACIA_BASE);
        acia.rxWrite('a');
        acia.txWrite('b');
        assertEquals(0x08, acia.read(0x0001, true));
        // non-destructive reads must leave the status register untouched
        assertEquals('a', acia.rxRead(false));
        assertEquals('b', acia.txRead(false));
        assertEquals(0x08, acia.read(0x0001, true));
    }
}
| |
/*
* Copyright 2012 Medical Research Council Harwell.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mousephenotype.dcc.crawler.entities;
import java.io.Serializable;
import java.util.Collection;
import java.util.Date;
import javax.persistence.Basic;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
/**
*
* @author Gagarine Yaikhom <g.yaikhom@har.mrc.ac.uk>
*/
@Entity
@Table(name = "xml_file", catalog = "phenodcc_tracker", schema = "")
@XmlRootElement
@NamedQueries({
    @NamedQuery(name = "XmlFile.findAll", query = "SELECT x FROM XmlFile x"),
    @NamedQuery(name = "XmlFile.findById", query = "SELECT x FROM XmlFile x WHERE x.id = :id"),
    @NamedQuery(name = "XmlFile.findByFname", query = "SELECT x FROM XmlFile x WHERE x.fname = :fname"),
    @NamedQuery(name = "XmlFile.findByZipFname", query = "SELECT x FROM XmlFile x WHERE (x.zipId = :zipId AND x.fname = :fname)"),
    @NamedQuery(name = "XmlFile.findByCreated", query = "SELECT x FROM XmlFile x WHERE x.created = :created"),
    @NamedQuery(name = "XmlFile.findBySizeBytes", query = "SELECT x FROM XmlFile x WHERE x.sizeBytes = :sizeBytes"),
    @NamedQuery(name = "XmlFile.findByContentMd5", query = "SELECT x FROM XmlFile x WHERE x.contentMd5 = :contentMd5"),
    @NamedQuery(name = "XmlFile.findByZipDownload", query = "SELECT x FROM XmlFile x WHERE (x.zipId = :zipId AND x.fname LIKE :pattern) ORDER BY x.created ASC"),
    @NamedQuery(name = "XmlFile.findByPhaseStatusTypeAscCreated", query = "SELECT x FROM XmlFile x WHERE (x.phaseId = :phaseId AND x.statusId = :statusId AND x.fname LIKE :pattern) ORDER BY x.zipId.zfId.zaId.zipId.created ASC, x.zipId.zfId.zaId.zipId.inc ASC, x.created ASC, x.inc"),
    @NamedQuery(name = "XmlFile.findByLastUpdate", query = "SELECT x FROM XmlFile x WHERE x.lastUpdate = :lastUpdate")})
public class XmlFile implements Serializable {

    private static final long serialVersionUID = 1L;

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Basic(optional = false)
    @Column(nullable = false)
    private Long id;

    // Name of the XML document (e.g. inside the zip archive it came from)
    @Basic(optional = false)
    @Column(nullable = false, length = 128)
    private String fname;

    @Basic(optional = true)
    @Column(nullable = true)
    @Temporal(TemporalType.DATE)
    private Date created;

    @Basic(optional = true)
    @Column(nullable = true)
    private Long inc;

    // NOTE(review): the column is declared nullable but is mapped to a
    // primitive long; a NULL value in the database would fail to load.
    // Consider changing the field type to Long — TODO confirm with schema.
    @Basic(optional = true)
    @Column(name = "size_bytes", nullable = true)
    private long sizeBytes;

    @Basic(optional = true)
    @Column(name = "content_md5", nullable = true, length = 32)
    private String contentMd5;

    // Maintained by the database (DEFAULT/ON UPDATE CURRENT_TIMESTAMP);
    // hence insertable/updatable are false.
    @Basic(optional = false)
    @Column(name = "last_update", columnDefinition = "TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP", nullable = false, insertable = false, updatable = false)
    @Temporal(TemporalType.TIMESTAMP)
    private Date lastUpdate;

    @OneToMany(cascade = CascadeType.ALL, mappedBy = "xmlId")
    private Collection<XmlLog> xmlLogCollection;

    @JoinColumn(name = "status_id", referencedColumnName = "id", nullable = false)
    @ManyToOne(optional = false)
    private AStatus statusId;

    @JoinColumn(name = "phase_id", referencedColumnName = "id", nullable = false)
    @ManyToOne(optional = false)
    private Phase phaseId;

    @JoinColumn(name = "zip_id", referencedColumnName = "id", nullable = false)
    @ManyToOne(optional = false)
    private ZipDownload zipId;

    @JoinColumn(name = "centre_id", referencedColumnName = "id")
    @ManyToOne
    private Centre centreId;

    /** No-argument constructor required by JPA. */
    public XmlFile() {
    }

    /** Creates a record with only the mandatory associations set. */
    public XmlFile(ZipDownload zipId, String fname,
            Phase phaseId, AStatus statusId) {
        this.zipId = zipId;
        this.fname = fname;
        this.phaseId = phaseId;
        this.statusId = statusId;
    }

    /** Creates a record with mandatory associations plus basic file metadata. */
    public XmlFile(ZipDownload zipId, String fname,
            Phase phaseId, AStatus statusId,
            Centre centreId, Date created, Long inc, long sizeBytes) {
        this.zipId = zipId;
        this.fname = fname;
        this.centreId = centreId;
        this.created = created;
        this.inc = inc;
        this.phaseId = phaseId;
        this.statusId = statusId;
        this.sizeBytes = sizeBytes;
    }

    /** Creates a fully-populated record, including the content MD5 checksum. */
    public XmlFile(ZipDownload zipId, String fname,
            Centre centreId, Date created, Long inc,
            Phase phaseId, AStatus statusId,
            long sizeBytes, String contentMd5) {
        this.zipId = zipId;
        this.fname = fname;
        this.centreId = centreId;
        this.created = created;
        this.inc = inc;
        this.phaseId = phaseId;
        this.statusId = statusId;
        this.sizeBytes = sizeBytes;
        this.contentMd5 = contentMd5;
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getFname() {
        return fname;
    }

    public void setFname(String fname) {
        this.fname = fname;
    }

    public Date getCreated() {
        return created;
    }

    public void setCreated(Date created) {
        this.created = created;
    }

    public Long getInc() {
        return inc;
    }

    public void setInc(Long inc) {
        this.inc = inc;
    }

    public long getSizeBytes() {
        return sizeBytes;
    }

    public void setSizeBytes(long sizeBytes) {
        this.sizeBytes = sizeBytes;
    }

    public String getContentMd5() {
        return contentMd5;
    }

    public void setContentMd5(String contentMd5) {
        this.contentMd5 = contentMd5;
    }

    public Date getLastUpdate() {
        return lastUpdate;
    }

    public void setLastUpdate(Date lastUpdate) {
        this.lastUpdate = lastUpdate;
    }

    // Excluded from XML marshalling to avoid serializing the (potentially
    // large, lazily-loaded) log collection.
    @XmlTransient
    public Collection<XmlLog> getXmlLogCollection() {
        return xmlLogCollection;
    }

    public void setXmlLogCollection(Collection<XmlLog> xmlLogCollection) {
        this.xmlLogCollection = xmlLogCollection;
    }

    public AStatus getStatusId() {
        return statusId;
    }

    public void setStatusId(AStatus statusId) {
        this.statusId = statusId;
    }

    public Phase getPhaseId() {
        return phaseId;
    }

    public void setPhaseId(Phase phaseId) {
        this.phaseId = phaseId;
    }

    public ZipDownload getZipId() {
        return zipId;
    }

    public void setZipId(ZipDownload zipId) {
        this.zipId = zipId;
    }

    public Centre getCentreId() {
        return centreId;
    }

    public void setCentreId(Centre centreId) {
        this.centreId = centreId;
    }

    /** Identity-based hash: entities without a generated id hash to 0. */
    @Override
    public int hashCode() {
        return (id != null) ? id.hashCode() : 0;
    }

    /**
     * Entities are equal when they carry the same database identity.
     * Unsaved instances (null id) are only equal to themselves.
     */
    @Override
    public boolean equals(Object object) {
        if (this == object) {
            return true;
        }
        if (!(object instanceof XmlFile)) {
            return false;
        }
        XmlFile other = (XmlFile) object;
        if (this.id == null || other.id == null) {
            return false;
        }
        return this.id.equals(other.id);
    }

    @Override
    public String toString() {
        return "org.mousephenotype.dcc.crawler.entities.XmlFile[ id=" + id + " ]";
    }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.compute.v2020_06_01.implementation;
import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.AzureServiceFuture;
import com.microsoft.azure.CloudException;
import com.microsoft.azure.ListOperationCallback;
import com.microsoft.azure.Page;
import com.microsoft.azure.PagedList;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import java.io.IOException;
import java.util.List;
import okhttp3.ResponseBody;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.Path;
import retrofit2.http.Query;
import retrofit2.http.Url;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;
/**
* An instance of this class provides access to all the operations defined
* in Usages.
*/
public class UsagesInner {
    /** The Retrofit service to perform REST calls. */
    private UsagesService service;
    /** The service client containing this operation class. */
    private ComputeManagementClientImpl client;
    /**
     * Initializes an instance of UsagesInner.
     *
     * @param retrofit the Retrofit instance built from a Retrofit Builder.
     * @param client the instance of the service client containing this operation class.
     */
    public UsagesInner(Retrofit retrofit, ComputeManagementClientImpl client) {
        this.service = retrofit.create(UsagesService.class);
        this.client = client;
    }
    /**
     * The interface defining all the services for Usages to be
     * used by Retrofit to perform the actual REST calls.
     */
    interface UsagesService {
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.compute.v2020_06_01.Usages list" })
        @GET("subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/usages")
        Observable<Response<ResponseBody>> list(@Path("location") String location, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.compute.v2020_06_01.Usages listNext" })
        @GET
        Observable<Response<ResponseBody>> listNext(@Url String nextUrl, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
    }
    /**
     * Gets, for the specified location, the current compute resource usage information as well as the limits for compute resources under the subscription.
     * Blocks until the first page is available; subsequent pages are fetched lazily.
     *
     * @param location The location for which resource usage is queried.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;UsageInner&gt; object if successful.
     */
    public PagedList<UsageInner> list(final String location) {
        ServiceResponse<Page<UsageInner>> response = listSinglePageAsync(location).toBlocking().single();
        return new PagedList<UsageInner>(response.body()) {
            @Override
            public Page<UsageInner> nextPage(String nextPageLink) {
                return listNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }
    /**
     * Gets, for the specified location, the current compute resource usage information as well as the limits for compute resources under the subscription.
     *
     * @param location The location for which resource usage is queried.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<UsageInner>> listAsync(final String location, final ListOperationCallback<UsageInner> serviceCallback) {
        return AzureServiceFuture.fromPageResponse(
            listSinglePageAsync(location),
            new Func1<String, Observable<ServiceResponse<Page<UsageInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<UsageInner>>> call(String nextPageLink) {
                    return listNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }
    /**
     * Gets, for the specified location, the current compute resource usage information as well as the limits for compute resources under the subscription.
     *
     * @param location The location for which resource usage is queried.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;UsageInner&gt; object
     */
    public Observable<Page<UsageInner>> listAsync(final String location) {
        return listWithServiceResponseAsync(location)
            .map(new Func1<ServiceResponse<Page<UsageInner>>, Page<UsageInner>>() {
                @Override
                public Page<UsageInner> call(ServiceResponse<Page<UsageInner>> response) {
                    return response.body();
                }
            });
    }
    /**
     * Gets, for the specified location, the current compute resource usage information as well as the limits for compute resources under the subscription.
     *
     * @param location The location for which resource usage is queried.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;UsageInner&gt; object
     */
    public Observable<ServiceResponse<Page<UsageInner>>> listWithServiceResponseAsync(final String location) {
        return listSinglePageAsync(location)
            .concatMap(new Func1<ServiceResponse<Page<UsageInner>>, Observable<ServiceResponse<Page<UsageInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<UsageInner>>> call(ServiceResponse<Page<UsageInner>> page) {
                    // Recursively chain the next page after this one until the
                    // server stops returning a nextLink.
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }
    /**
     * Gets, for the specified location, the current compute resource usage information as well as the limits for compute resources under the subscription.
     *
     * @param location The location for which resource usage is queried.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;UsageInner&gt; object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<UsageInner>>> listSinglePageAsync(final String location) {
        if (location == null) {
            throw new IllegalArgumentException("Parameter location is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        final String apiVersion = "2020-06-01";
        return service.list(location, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<UsageInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<UsageInner>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PageImpl1<UsageInner>> result = listDelegate(response);
                        return Observable.just(new ServiceResponse<Page<UsageInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
    // Deserializes a raw HTTP response into a typed page (200 => page, else CloudException).
    private ServiceResponse<PageImpl1<UsageInner>> listDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl1<UsageInner>, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<PageImpl1<UsageInner>>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }
    /**
     * Gets, for the specified location, the current compute resource usage information as well as the limits for compute resources under the subscription.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;UsageInner&gt; object if successful.
     */
    public PagedList<UsageInner> listNext(final String nextPageLink) {
        ServiceResponse<Page<UsageInner>> response = listNextSinglePageAsync(nextPageLink).toBlocking().single();
        return new PagedList<UsageInner>(response.body()) {
            @Override
            public Page<UsageInner> nextPage(String nextPageLink) {
                return listNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }
    /**
     * Gets, for the specified location, the current compute resource usage information as well as the limits for compute resources under the subscription.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @param serviceFuture the ServiceFuture object tracking the Retrofit calls
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<UsageInner>> listNextAsync(final String nextPageLink, final ServiceFuture<List<UsageInner>> serviceFuture, final ListOperationCallback<UsageInner> serviceCallback) {
        return AzureServiceFuture.fromPageResponse(
            listNextSinglePageAsync(nextPageLink),
            new Func1<String, Observable<ServiceResponse<Page<UsageInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<UsageInner>>> call(String nextPageLink) {
                    return listNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }
    /**
     * Gets, for the specified location, the current compute resource usage information as well as the limits for compute resources under the subscription.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;UsageInner&gt; object
     */
    public Observable<Page<UsageInner>> listNextAsync(final String nextPageLink) {
        return listNextWithServiceResponseAsync(nextPageLink)
            .map(new Func1<ServiceResponse<Page<UsageInner>>, Page<UsageInner>>() {
                @Override
                public Page<UsageInner> call(ServiceResponse<Page<UsageInner>> response) {
                    return response.body();
                }
            });
    }
    /**
     * Gets, for the specified location, the current compute resource usage information as well as the limits for compute resources under the subscription.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;UsageInner&gt; object
     */
    public Observable<ServiceResponse<Page<UsageInner>>> listNextWithServiceResponseAsync(final String nextPageLink) {
        return listNextSinglePageAsync(nextPageLink)
            .concatMap(new Func1<ServiceResponse<Page<UsageInner>>, Observable<ServiceResponse<Page<UsageInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<UsageInner>>> call(ServiceResponse<Page<UsageInner>> page) {
                    // Recursively chain the next page after this one until the
                    // server stops returning a nextLink.
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }
    /**
     * Gets, for the specified location, the current compute resource usage information as well as the limits for compute resources under the subscription.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;UsageInner&gt; object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<UsageInner>>> listNextSinglePageAsync(final String nextPageLink) {
        if (nextPageLink == null) {
            throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
        }
        String nextUrl = String.format("%s", nextPageLink);
        return service.listNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<UsageInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<UsageInner>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PageImpl1<UsageInner>> result = listNextDelegate(response);
                        return Observable.just(new ServiceResponse<Page<UsageInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
    // Deserializes a raw HTTP response into a typed page (200 => page, else CloudException).
    private ServiceResponse<PageImpl1<UsageInner>> listNextDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl1<UsageInner>, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<PageImpl1<UsageInner>>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package example.loadmanually;
import java.sql.*;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Executor;
/**
*
* @author jmarranz
*/
public class SimpleConnectionWrapper implements Connection
{
protected SimpleDataSource dataSource;
protected Connection conn;
protected int index;
protected boolean inuse = false;
public SimpleConnectionWrapper(SimpleDataSource dataSource,Connection conn,int index)
{
this.dataSource = dataSource;
this.conn = conn;
this.index = index;
}
public void holdConnection()
{
this.inuse = true;
}
public void releaseConnection()
{
if (!inuse) return; // Ya liberada
this.inuse = false;
}
public boolean isInUse()
{
return inuse;
}
public Connection getInternalConnection()
{
return conn;
}
@Override
public Statement createStatement() throws SQLException {
return conn.createStatement();
}
@Override
public PreparedStatement prepareStatement(String sql) throws SQLException {
return conn.prepareStatement(sql);
}
@Override
public CallableStatement prepareCall(String sql) throws SQLException {
return conn.prepareCall(sql);
}
@Override
public String nativeSQL(String sql) throws SQLException {
return conn.nativeSQL(sql);
}
@Override
public void setAutoCommit(boolean autoCommit) throws SQLException {
conn.setAutoCommit(autoCommit);
}
@Override
public boolean getAutoCommit() throws SQLException {
return conn.getAutoCommit();
}
@Override
public void commit() throws SQLException {
conn.commit();
}
@Override
public void rollback() throws SQLException {
conn.rollback();
}
@Override
public void close() throws SQLException {
dataSource.releaseConnection(this);
}
@Override
public boolean isClosed() throws SQLException {
if (isInUse()) return conn.isClosed();
else return true;
}
@Override
public DatabaseMetaData getMetaData() throws SQLException {
return conn.getMetaData();
}
@Override
public void setReadOnly(boolean readOnly) throws SQLException {
conn.setReadOnly(readOnly);
}
@Override
public boolean isReadOnly() throws SQLException {
return conn.isReadOnly();
}
@Override
public void setCatalog(String catalog) throws SQLException {
conn.setCatalog(catalog);
}
@Override
public String getCatalog() throws SQLException {
return conn.getCatalog();
}
@Override
public void setTransactionIsolation(int level) throws SQLException {
conn.setTransactionIsolation(level);
}
@Override
public int getTransactionIsolation() throws SQLException {
return conn.getTransactionIsolation();
}
@Override
public SQLWarning getWarnings() throws SQLException {
return conn.getWarnings();
}
@Override
public void clearWarnings() throws SQLException {
conn.clearWarnings();
}
@Override
public Statement createStatement(int resultSetType, int resultSetConcurrency) throws SQLException {
return conn.createStatement(resultSetType,resultSetConcurrency);
}
@Override
public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency) throws SQLException {
return conn.prepareStatement(sql,resultSetType,resultSetConcurrency);
}
@Override
public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) throws SQLException {
return conn.prepareCall(sql,resultSetType,resultSetConcurrency);
}
@Override
public Map<String, Class<?>> getTypeMap() throws SQLException {
return conn.getTypeMap();
}
@Override
public void setTypeMap(Map<String, Class<?>> map) throws SQLException {
conn.setTypeMap(map);
}
@Override
public void setHoldability(int holdability) throws SQLException {
conn.setHoldability(holdability);
}
@Override
public int getHoldability() throws SQLException {
return conn.getHoldability();
}
@Override
public Savepoint setSavepoint() throws SQLException {
return conn.setSavepoint();
}
@Override
public Savepoint setSavepoint(String name) throws SQLException {
return conn.setSavepoint(name);
}
@Override
public void rollback(Savepoint savepoint) throws SQLException {
conn.rollback(savepoint);
}
@Override
public void releaseSavepoint(Savepoint savepoint) throws SQLException {
conn.releaseSavepoint(savepoint);
}
@Override
public Statement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
return conn.createStatement(resultSetType,resultSetConcurrency,resultSetHoldability);
}
@Override
public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
return conn.prepareStatement(sql,resultSetType,resultSetConcurrency,resultSetHoldability);
}
@Override
public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
return conn.prepareCall(sql,resultSetType,resultSetConcurrency,resultSetHoldability);
}
@Override
public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException {
return conn.prepareStatement(sql,autoGeneratedKeys);
}
@Override
public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException {
return conn.prepareStatement(sql,columnIndexes);
}
@Override
public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException {
return conn.prepareStatement(sql,columnNames);
}
@Override
public Clob createClob() throws SQLException {
return conn.createClob();
}
@Override
public Blob createBlob() throws SQLException {
return conn.createBlob();
}
@Override
public NClob createNClob() throws SQLException {
return conn.createNClob();
}
@Override
public SQLXML createSQLXML() throws SQLException {
return conn.createSQLXML();
}
@Override
public boolean isValid(int timeout) throws SQLException {
return conn.isValid(timeout);
}
// --- Client-info, SQL type-factory, and Wrapper methods: delegation to 'conn' ---

// Sets one client-info property on the underlying connection.
@Override
public void setClientInfo(String name, String value) throws SQLClientInfoException {
conn.setClientInfo(name,value);
}
// Replaces the connection's client-info properties with the supplied set.
@Override
public void setClientInfo(Properties properties) throws SQLClientInfoException {
conn.setClientInfo(properties);
}
// Returns the value of a single client-info property.
@Override
public String getClientInfo(String name) throws SQLException {
return conn.getClientInfo(name);
}
// Returns all client-info properties of the underlying connection.
@Override
public Properties getClientInfo() throws SQLException {
return conn.getClientInfo();
}
// Creates a SQL ARRAY value whose elements map to the given SQL type name.
@Override
public Array createArrayOf(String typeName, Object[] elements) throws SQLException {
return conn.createArrayOf(typeName,elements);
}
// Creates a SQL structured-type value with the given attributes.
@Override
public Struct createStruct(String typeName, Object[] attributes) throws SQLException {
return conn.createStruct(typeName,attributes);
}
// Unwraps the underlying connection to an implementation-specific interface.
@Override
public <T> T unwrap(Class<T> iface) throws SQLException {
return conn.unwrap(iface);
}
// Reports whether the underlying connection can be unwrapped to 'iface'.
@Override
public boolean isWrapperFor(Class<?> iface) throws SQLException {
return conn.isWrapperFor(iface);
}
// --- JDBC 4.1 (Java 7) additions: deliberately unimplemented by this wrapper ---
// NOTE(review): these throw the unchecked UnsupportedOperationException rather than the
// JDBC-conventional SQLFeatureNotSupportedException, so callers that catch SQLException
// will NOT see these failures -- confirm this is intentional before relying on it.
public void setSchema(String schema) throws SQLException
{
throw new UnsupportedOperationException("Not supported yet.");
}
public String getSchema() throws SQLException
{
throw new UnsupportedOperationException("Not supported yet.");
}
public void abort(Executor executor) throws SQLException
{
throw new UnsupportedOperationException("Not supported yet.");
}
public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException
{
throw new UnsupportedOperationException("Not supported yet.");
}
public int getNetworkTimeout() throws SQLException
{
throw new UnsupportedOperationException("Not supported yet.");
}
}
| |
/*
* Copyright 2014 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package org.debezium.driver;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.function.Predicate;
import org.debezium.Configuration;
import org.debezium.driver.SecurityProvider.CompositeAction;
import org.debezium.message.Batch;
import org.debezium.message.Document;
import org.debezium.message.Message;
import org.debezium.message.Patch;
import org.debezium.message.Patch.Editor;
import org.debezium.message.Patch.Operation;
import org.debezium.message.Topic;
import org.debezium.message.Value;
import org.debezium.model.ChangeStatus;
import org.debezium.model.DatabaseId;
import org.debezium.model.Entity;
import org.debezium.model.EntityChange;
import org.debezium.model.EntityCollection;
import org.debezium.model.EntityId;
import org.debezium.model.EntityType;
import org.debezium.model.EntityTypeChange;
import org.debezium.model.Identifier;
import org.debezium.model.Schema;
import org.debezium.util.Clock;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Randall Hauch
*
*/
/**
 * Standard {@link DebeziumDriver} implementation. It hosts a {@link DbzNode} plus the
 * services the node manages, authorizes every call through the environment's
 * security provider, and correlates asynchronous request/response pairs via
 * {@link DbzPartialResponses}.
 *
 * <p>Fixes in this revision: the connect() failure message now renders the database IDs
 * (was printing the array's identity hash), the provision() and destroyEntity() failure
 * messages no longer carry copy-pasted text from other operations, and submitBatch()
 * reports "driver not running" consistently with every other method.
 *
 * @author Randall Hauch
 */
final class DbzDriver implements DebeziumDriver {
    private final Configuration config;
    private final Logger logger = LoggerFactory.getLogger(getClass());
    private final Environment env;
    private final DbzNode node;
    private final DbzDatabases databases;
    private final DbzPartialResponses partialResponses;
    private final Clock clock;

    DbzDriver(Configuration config, Environment env) {
        this.config = config;
        this.env = env;
        this.clock = this.env.getClock();
        this.node = new DbzNode(this.config, env);
        this.partialResponses = new DbzPartialResponses();
        this.databases = new DbzDatabases(this.partialResponses);
        // Register the services with the node so it manages their lifecycle.
        this.node.add(this.databases, this.partialResponses);
    }

    @Override
    public Configuration getConfiguration() {
        return config;
    }

    /** Starts the underlying node; returns this driver for call chaining. */
    DbzDriver start() {
        node.start();
        return this;
    }

    // --- Metrics hooks: placeholders until metric recording is implemented (see TODOs) ---

    private void logConnection(SessionToken token, String[] databaseIds, long durationInNanos, String action) {
        env.getSecurity().info(token, (username, device, appVersion) -> {
            // TODO: record information in metrics
        });
    }

    private void logUsage(SessionToken token, String databaseId, long durationInNanos, String action) {
        env.getSecurity().info(token, (username, device, appVersion) -> {
            // TODO: record information in metrics
        });
    }

    private void logUsage(SessionToken token, String databaseId, long durationInNanos, String action, String field, Object value) {
        env.getSecurity().info(token, (username, device, appVersion) -> {
            // TODO: record information in metrics
        });
    }

    private void logUsage(SessionToken token, String databaseId, long durationInNanos, String action, String field1, Object value1,
            String field2, Object value2) {
        env.getSecurity().info(token, (username, device, appVersion) -> {
            // TODO: record information in metrics
        });
    }

    /** Nanoseconds elapsed since the given start time (also in nanoseconds). */
    private long duration(long start) {
        return env.getClock().currentTimeInNanos() - start;
    }

    /** Exception thrown by every public method when the node is not running. */
    private DebeziumClientException notRunning() {
        return new DebeziumClientException("The Debezium driver is not running");
    }

    @Override
    public SessionToken connect(String username, String device, String appVersion, String... databaseIds) {
        return node.whenRunning(() -> {
            long start = clock.currentTimeInNanos();
            Set<String> existingDbIds = databases.existing(databaseIds);
            SessionToken token = env.getSecurity().authenticate(username, device, appVersion, existingDbIds, databaseIds);
            if (token == null) {
                // Arrays.toString renders the IDs; plain concatenation printed the array's identity hash.
                throw new DebeziumAuthorizationException("Unable to authenticate user '" + username + "' for databases: "
                        + Arrays.toString(databaseIds));
            }
            logConnection(token, databaseIds, duration(start), "connect");
            return token;
        }).orElseThrow(this::notRunning);
    }

    @Override
    public void provision(SessionToken adminToken, String databaseId, long timeout, TimeUnit unit) {
        node.whenRunning(() -> {
            long start = clock.currentTimeInNanos();
            String username = env.getSecurity().canAdminister(adminToken, databaseId);
            if (username == null) {
                // Message corrected: this is a provisioning failure, not a schema read.
                throw new DebeziumAuthorizationException("Unable to provision database '" + databaseId + "'");
            }
            databases.provision(username, Identifier.of(databaseId), timeout, unit);
            logUsage(adminToken, databaseId, duration(start), "provision");
            return Boolean.TRUE;
        }).orElseThrow(this::notRunning);
    }

    @Override
    public Schema readSchema(SessionToken token, String databaseId, long timeout, TimeUnit unit) {
        return node.whenRunning(() -> {
            long start = clock.currentTimeInNanos();
            String username = env.getSecurity().canRead(token, databaseId);
            if (username == null) {
                throw new DebeziumAuthorizationException("Unable to read schema for database '" + databaseId + "'");
            }
            Schema schema = databases.readSchema(databaseId);
            logUsage(token, databaseId, duration(start), "readSchema");
            return schema;
        }).orElseThrow(this::notRunning);
    }

    @Override
    public EntityTypeChange changeEntityType(SessionToken token, Patch<EntityType> patch, long timeout, TimeUnit unit) {
        return node.whenRunning(() -> {
            long start = clock.currentTimeInNanos();
            // Check the privilege first ...
            EntityType typeId = patch.target();
            String entityType = typeId.entityTypeName();
            DatabaseId dbId = typeId.databaseId();
            String databaseName = dbId.asString();
            String username = env.getSecurity().canAdminister(token, databaseName);
            if (username == null) {
                throw new DebeziumAuthorizationException("Unable to change the entity type '" + entityType + "' in the database '" + dbId + "'");
            }
            logger.debug("Attempting to change entity type '{}' in database '{}' with patch: {}", entityType, dbId, patch);
            return partialResponses.submit(EntityTypeChange.class, requestId -> {
                logger.trace("Attempting to submit request to change entity type '{}' in database '{}'", entityType, dbId);
                Document request = patch.asDocument();
                Message.addHeaders(request, requestId.getClientId(), requestId.getRequestNumber(), username);
                if (!node.send(Topic.SCHEMA_PATCHES, databaseName, request)) {
                    throw new DebeziumClientException("Unable to send request to change the entity type '" + entityType + "' in the database '" + dbId + "'");
                }
            }).onResponse(timeout, unit, response -> {
                logger.trace("Received response from changing entity type '{}' in database '{}'", entityType, dbId);
                EntityType id = Message.getEntityType(response);
                Document representation = Message.getAfterOrBefore(response);
                ChangeStatus status = null;
                Collection<String> failureReasons = null;
                switch (Message.getStatus(response)) {
                    case SUCCESS:
                        status = ChangeStatus.OK;
                        failureReasons = Message.getFailureReasons(response);
                        logUsage(token, databaseName, duration(start), "changeEntityType", "found", true, "changed", true);
                        logger.trace("Successfully changed entity type '{}'", id);
                        break;
                    case PATCH_FAILED:
                        status = ChangeStatus.PATCH_FAILED;
                        failureReasons = Message.getFailureReasons(response);
                        logUsage(token, databaseName, duration(start), "changeEntityType", "found", true, "changed", false);
                        logger.trace("Unable to apply patch to change entity type '{}'", id);
                        break;
                    case DOES_NOT_EXIST:
                        status = ChangeStatus.DOES_NOT_EXIST;
                        failureReasons = Message.getFailureReasons(response);
                        logUsage(token, databaseName, duration(start), "changeEntityType", "found", false, "changed", false);
                        logger.trace("Unable to find entity type '{}'", id);
                        break;
                }
                EntityCollection collection = EntityCollection.with(id, representation);
                return EntityTypeChange.with(patch, collection, status, failureReasons);
            }).onTimeout(() -> {
                throw new DebeziumTimeoutException("The request to change entity type '" + entityType + "' in database '" + dbId + "' timed out");
            });
        }).orElseThrow(this::notRunning);
    }

    @Override
    public Entity readEntity(SessionToken token, EntityId entityId, long timeout, TimeUnit unit) {
        return node.whenRunning(() -> {
            long start = clock.currentTimeInNanos();
            // Check the privilege first ...
            DatabaseId dbId = entityId.databaseId();
            String databaseName = dbId.asString();
            String username = env.getSecurity().canRead(token, databaseName);
            if (username == null) {
                throw new DebeziumAuthorizationException("Unable to read entity '" + entityId + "'");
            }
            logger.debug("Attempting to read entity '{}'", entityId);
            return partialResponses.submit(Entity.class, requestId -> {
                logger.trace("Attempting to submit request to read entity '{}'", entityId);
                Document request = Patch.read(entityId).asDocument();
                Message.addHeaders(request, requestId.getClientId(), requestId.getRequestNumber(), username);
                if (!node.send(Topic.ENTITY_PATCHES, entityId.asString(), request)) {
                    throw new DebeziumClientException("Unable to send request to read entity '" + entityId + "'");
                }
            }).onResponse(timeout, unit, response -> {
                logger.trace("Received response from reading entity '{}'", entityId);
                EntityId id = Message.getEntityId(response);
                Document representation = Message.getAfter(response);
                if (representation != null) {
                    logUsage(token, databaseName, duration(start), "readEntity", "found", true);
                    logger.trace("Successfully read entity '{}'", entityId);
                } else {
                    logUsage(token, databaseName, duration(start), "readEntity", "found", false);
                    logger.trace("Unable to find entity '{}'", entityId);
                }
                return Entity.with(id, representation);
            }).onTimeout(() -> {
                throw new DebeziumTimeoutException("The request to read entity '" + entityId + "' timed out");
            });
        }).orElseThrow(this::notRunning);
    }

    @Override
    public EntityChange changeEntity(SessionToken token, Patch<EntityId> patch, long timeout, TimeUnit unit) {
        return node.whenRunning(() -> {
            long start = clock.currentTimeInNanos();
            // Check the privilege first ...
            EntityId entityId = patch.target();
            DatabaseId dbId = entityId.databaseId();
            String databaseName = dbId.asString();
            String username = env.getSecurity().canWrite(token, databaseName);
            if (username == null) {
                throw new DebeziumAuthorizationException("Unable to change entity '" + entityId + "'");
            }
            logger.debug("Attempting to change entity '{}' with patch: {}", entityId, patch);
            return partialResponses.submit(EntityChange.class, requestId -> {
                logger.trace("Attempting to submit request to change entity '{}'", entityId);
                Document request = patch.asDocument();
                Message.addHeaders(request, requestId.getClientId(), requestId.getRequestNumber(), username);
                if (!node.send(Topic.ENTITY_PATCHES, entityId.asString(), request)) {
                    throw new DebeziumClientException("Unable to send request to change entity '" + entityId + "'");
                }
            }).onResponse(timeout, unit, response -> {
                logger.trace("Received response from changing entity '{}'", entityId);
                EntityId id = Message.getEntityId(response);
                Document representation = Message.getAfterOrBefore(response);
                ChangeStatus status = null;
                Collection<String> failureReasons = null;
                switch (Message.getStatus(response)) {
                    case SUCCESS:
                        status = ChangeStatus.OK;
                        failureReasons = Message.getFailureReasons(response);
                        logUsage(token, databaseName, duration(start), "changeEntity", "found", true, "changed", true);
                        logger.trace("Successfully changed entity '{}'", id);
                        break;
                    case PATCH_FAILED:
                        status = ChangeStatus.PATCH_FAILED;
                        failureReasons = Message.getFailureReasons(response);
                        logUsage(token, databaseName, duration(start), "changeEntity", "found", true, "changed", false);
                        logger.trace("Unable to apply patch to change entity '{}'", id);
                        break;
                    case DOES_NOT_EXIST:
                        status = ChangeStatus.DOES_NOT_EXIST;
                        failureReasons = Message.getFailureReasons(response);
                        logUsage(token, databaseName, duration(start), "changeEntity", "found", false, "changed", false);
                        logger.trace("Unable to find entity '{}'", id);
                        break;
                }
                Entity entity = Entity.with(id, representation);
                return EntityChange.with(patch, entity, status, failureReasons);
            }).onTimeout(() -> {
                throw new DebeziumTimeoutException("The request to change entity '" + entityId + "' timed out");
            });
        }).orElseThrow(this::notRunning);
    }

    @Override
    public boolean destroyEntity(SessionToken token, EntityId entityId, long timeout, TimeUnit unit) {
        return node.whenRunning(() -> {
            long start = clock.currentTimeInNanos();
            // Check the privilege first ...
            DatabaseId dbId = entityId.databaseId();
            String databaseName = dbId.asString();
            String username = env.getSecurity().canWrite(token, databaseName);
            if (username == null) {
                throw new DebeziumAuthorizationException("Unable to destroy entity '" + entityId + "'");
            }
            logger.debug("Attempting to destroy entity '{}'", entityId);
            return partialResponses.submit(Boolean.class, requestId -> {
                logger.trace("Attempting to submit request to destroy entity '{}'", entityId);
                Document request = Patch.destroy(entityId).asDocument();
                Message.addHeaders(request, requestId.getClientId(), requestId.getRequestNumber(), username);
                if (!node.send(Topic.ENTITY_PATCHES, entityId.asString(), request)) {
                    // Message corrected: this is a destroy request, not a read.
                    throw new DebeziumClientException("Unable to send request to destroy entity '" + entityId + "'");
                }
            }).onResponse(timeout, unit, response -> {
                logger.trace("Received response from destroying entity '{}'", entityId);
                EntityId id = Message.getEntityId(response);
                // A non-null "before" image means the entity existed and was removed.
                if (Message.getBefore(response) != null) {
                    logUsage(token, databaseName, duration(start), "destroyEntity", "succeed", true);
                    logger.trace("Successfully destroyed entity '{}'", id);
                    return true;
                }
                logUsage(token, databaseName, duration(start), "destroyEntity", "succeed", false);
                logger.trace("Unable to find and destroy entity '{}'", id);
                return false;
            }).onTimeout(() -> {
                throw new DebeziumTimeoutException("The request to destroy '" + entityId + "' timed out");
            });
        }).orElseThrow(this::notRunning);
    }

    @Override
    public BatchBuilder batch() {
        return new BatchBuilder() {
            private final Batch.Builder<EntityId> batchBuilder = Batch.create();

            @Override
            public BatchBuilder readEntity(EntityId entityId) {
                batchBuilder.read(entityId);
                return this;
            }

            @Override
            public BatchBuilder changeEntity(Patch<EntityId> patch) {
                batchBuilder.patch(patch);
                return this;
            }

            @Override
            public Editor<BatchBuilder> createEntity(EntityType entityType) {
                return changeEntity(Identifier.newEntity(entityType));
            }

            @Override
            public Editor<BatchBuilder> changeEntity(EntityId entityId) {
                Patch.Editor<Patch<EntityId>> editor = Patch.edit(entityId);
                BatchBuilder builder = this;
                // Adapt the Patch editor so that ending it records the patch on this batch.
                return new Patch.Editor<BatchBuilder>() {
                    @Override
                    public Editor<BatchBuilder> add(String path, Value value) {
                        editor.add(path, value);
                        return this;
                    }

                    @Override
                    public Editor<BatchBuilder> copy(String fromPath, String toPath) {
                        editor.copy(fromPath, toPath);
                        return this;
                    }

                    @Override
                    public Editor<BatchBuilder> increment(String path, Number increment) {
                        editor.increment(path, increment);
                        return this;
                    }

                    @Override
                    public Editor<BatchBuilder> move(String fromPath, String toPath) {
                        editor.move(fromPath, toPath);
                        return this;
                    }

                    @Override
                    public Editor<BatchBuilder> remove(String path) {
                        editor.remove(path);
                        return this;
                    }

                    @Override
                    public Editor<BatchBuilder> replace(String path, Value newValue) {
                        editor.replace(path, newValue);
                        return this;
                    }

                    @Override
                    public Editor<BatchBuilder> require(String path, Value expectedValue) {
                        editor.require(path, expectedValue);
                        return this;
                    }

                    @Override
                    public BatchBuilder end() {
                        editor.endIfChanged().ifPresent(builder::changeEntity);
                        return builder;
                    }

                    @Override
                    public Optional<BatchBuilder> endIfChanged() {
                        editor.endIfChanged().ifPresent(builder::changeEntity);
                        return Optional.of(builder);
                    }

                    @Override
                    public Optional<BatchBuilder> endIfChanged(Predicate<Operation> significant) {
                        editor.endIfChanged(significant).ifPresent(builder::changeEntity);
                        return Optional.of(builder);
                    }
                };
            }

            @Override
            public BatchBuilder destroyEntity(EntityId entityId) {
                batchBuilder.remove(entityId);
                return this;
            }

            @Override
            public BatchResult submit(SessionToken token, long timeout, TimeUnit unit) {
                return submitBatch(token, batchBuilder.build(), timeout, unit); // resets batch builder each time
            }
        };
    }

    /**
     * Authorizes and sends a batch of entity patches, then assembles the reads,
     * changes, and destroys from the individual responses into one result.
     */
    private BatchResult submitBatch(SessionToken token, Batch<EntityId> batch, long timeout, TimeUnit unit) {
        return node.whenRunning(() -> {
            long start = clock.currentTimeInNanos();
            // Check the privilege first ...
            CompositeAction check = env.getSecurity().check(token);
            batch.forEach(patch -> {
                String dbId = patch.target().databaseId().asString();
                if (patch.isReadRequest())
                    check.canRead(dbId);
                else if (patch.isDeletion())
                    check.canWrite(dbId);
                else if (patch.isEmpty()) {
                    // Empty patches need no privilege check.
                } else
                    check.canWrite(dbId);
            });
            String username = check.submit();
            if (username == null) {
                throw new DebeziumAuthorizationException("Unable to submit batch against database(s) " + check);
            }
            int count = batch.patchCount();
            Map<String, Entity> reads = new ConcurrentHashMap<>();
            Map<String, EntityChange> changes = new ConcurrentHashMap<>();
            Set<String> destroys = new HashSet<>();
            logger.debug("Attempting to submit batch with {} patches against database(s): {}", count, check);
            partialResponses.submit(count, requestId -> {
                Document request = batch.asDocument();
                Message.addHeaders(request, requestId.getClientId(), requestId.getRequestNumber(), username);
                if (!node.send(Topic.ENTITY_BATCHES, requestId.asString(), request)) {
                    throw new DebeziumClientException("Unable to send batch with " + count + " patches against database(s) " + check);
                }
            }).onEachResponse(timeout, unit, response -> {
                EntityId id = Message.getEntityId(response);
                Document representation = Message.getAfterOrBefore(response);
                Patch<EntityId> patch = Patch.forEntity(response);
                if (patch == null) {
                    // We read the entity ...
                    reads.put(id.asString(), Entity.with(id, representation));
                } else if (patch.isDeletion()) {
                    destroys.add(id.asString());
                } else {
                    Entity entity = Entity.with(id, representation);
                    ChangeStatus status = changeStatus(Message.getStatus(response));
                    Collection<String> failureReasons = Message.getFailureReasons(response);
                    changes.put(id.asString(), EntityChange.with(patch, entity, status, failureReasons));
                }
            }).onTimeout(() -> {
                throw new DebeziumTimeoutException("The request timed out while submitting batch with " + count
                        + " patches against database(s): " + check);
            });
            logUsage(token, null, duration(start), "submitBatch", "parts", count);
            return new DbzBatchResult(reads, changes, destroys);
            // Consistent with every other method: report "not running" with its message
            // rather than a message-less DebeziumClientException.
        }).orElseThrow(this::notRunning);
    }

    /** Maps a message-level status onto the public {@link ChangeStatus} enum. */
    private static ChangeStatus changeStatus(Message.Status messageStatus) {
        switch (messageStatus) {
            case SUCCESS:
                return ChangeStatus.OK;
            case PATCH_FAILED:
                return ChangeStatus.PATCH_FAILED;
            case DOES_NOT_EXIST:
                return ChangeStatus.DOES_NOT_EXIST;
        }
        throw new IllegalStateException("Unknown status: " + messageStatus);
    }

    @Override
    public void shutdown(long timeout, TimeUnit unit) {
        // Shutdown the cluster node, which shuts down all services and the service manager ...
        try {
            node.shutdown();
        } finally {
            // Always shut the environment down, even if the node fails to stop cleanly.
            env.shutdown(timeout, unit);
        }
    }
}
| |
package foss.devmapal.axis_allies_calc.axis_allies_calc.test;
import android.os.AsyncTask;
import android.test.InstrumentationTestCase;
import java.util.ArrayList;
import java.util.Arrays;
import foss.devmapal.axis_allies_calc.axis_allies_calc.Army;
import foss.devmapal.axis_allies_calc.axis_allies_calc.BattleResult;
import foss.devmapal.axis_allies_calc.axis_allies_calc.LandBattleSimulation;
import foss.devmapal.axis_allies_calc.axis_allies_calc.MainActivity;
import foss.devmapal.axis_allies_calc.axis_allies_calc.NavalBattleSimulation;
import foss.devmapal.axis_allies_calc.axis_allies_calc.R;
import foss.devmapal.axis_allies_calc.axis_allies_calc.WeaponsDevelopment;
/**
* Created by devmapal on 4/28/14.
*/
public class SimulationTest extends InstrumentationTestCase {
// Relative tolerance (5%) accepted between the simulated and analytic win rates.
static final double TOLERANCE = 0.05;
// Casualty orders: lists of unit-type indices in the order losses are taken.
// Populated in setUp(); indices 0-4 are land/air units, 5-9 naval units.
private ArrayList<Integer> land_attacker_hit_order;
private ArrayList<Integer> land_defender_hit_order;
private ArrayList<Integer> naval_hit_order;
@Override
protected void setUp() throws Exception {
    super.setUp();
    // Plain ArrayLists instead of double-brace initialization: the latter creates
    // an anonymous ArrayList subclass per list that holds a hidden reference to the
    // enclosing test instance (a known leak-prone anti-pattern).
    // Unit-type indices: 0=Infantry, 1=Artillery, 2=Tank, 3=Fighter, 4=Bomber,
    // 5=Battleship, 6=Destroyer, 7=Aircraft carrier, 8=Transport, 9=Submarine.
    land_attacker_hit_order = new ArrayList<Integer>(Arrays.asList(0, 1, 2, 3, 4));
    land_defender_hit_order = new ArrayList<Integer>(Arrays.asList(4, 0, 1, 2, 3));
    naval_hit_order = new ArrayList<Integer>(Arrays.asList(8, 9, 3, 6, 4, 7, 5));
}
public void test1InfVs1Inf() throws Exception {
Army attacker = new Army();
attacker.set_infantry(1);
WeaponsDevelopment attacker_wd = new WeaponsDevelopment();
Army defender = new Army();
defender.set_infantry(1);
WeaponsDevelopment defender_wd = new WeaponsDevelopment();
AsyncTask<Void, Void, Void> task = new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
return null;
}
};
LandBattleSimulation battle = new LandBattleSimulation(
task,
attacker,
attacker_wd,
land_attacker_hit_order,
defender,
defender_wd,
land_defender_hit_order,
100000,
true);
BattleResult result = battle.run();
double attacker_won = ((double) result.get_attacker_won())/result.get_sim_iters()*100;
double expected_result = 100*1./4;
assertTrue(attacker_won/expected_result > 1 - TOLERANCE);
assertTrue(attacker_won/expected_result < 1 + TOLERANCE);
}
public void test1InfVs1Artillery() throws Exception {
Army attacker = new Army();
attacker.set_infantry(1);
WeaponsDevelopment attacker_wd = new WeaponsDevelopment();
Army defender = new Army();
defender.set_artillery(1);
WeaponsDevelopment defender_wd = new WeaponsDevelopment();
AsyncTask<Void, Void, Void> task = new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
return null;
}
};
LandBattleSimulation battle = new LandBattleSimulation(
task,
attacker,
attacker_wd,
land_attacker_hit_order,
defender,
defender_wd,
land_defender_hit_order,
100000,
true);
BattleResult result = battle.run();
double attacker_won = ((double) result.get_attacker_won())/result.get_sim_iters()*100;
double expected_result = 100*1./4;
assertTrue(attacker_won/expected_result > 1 - TOLERANCE);
assertTrue(attacker_won/expected_result < 1 + TOLERANCE);
}
public void test1InfVs1Tank() throws Exception {
Army attacker = new Army();
attacker.set_infantry(1);
WeaponsDevelopment attacker_wd = new WeaponsDevelopment();
Army defender = new Army();
defender.set_tanks(1);
WeaponsDevelopment defender_wd = new WeaponsDevelopment();
AsyncTask<Void, Void, Void> task = new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
return null;
}
};
LandBattleSimulation battle = new LandBattleSimulation(
task,
attacker,
attacker_wd,
land_attacker_hit_order,
defender,
defender_wd,
land_defender_hit_order,
100000,
true);
BattleResult result = battle.run();
double attacker_won = ((double) result.get_attacker_won())/result.get_sim_iters()*100;
double expected_result = 100*1./7;
assertTrue(attacker_won/expected_result > 1 - TOLERANCE);
assertTrue(attacker_won/expected_result < 1 + TOLERANCE);
}
public void test1InfVs1Fighter() throws Exception {
Army attacker = new Army();
attacker.set_infantry(1);
WeaponsDevelopment attacker_wd = new WeaponsDevelopment();
Army defender = new Army();
defender.set_fighters(1);
WeaponsDevelopment defender_wd = new WeaponsDevelopment();
AsyncTask<Void, Void, Void> task = new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
return null;
}
};
LandBattleSimulation battle = new LandBattleSimulation(
task,
attacker,
attacker_wd,
land_attacker_hit_order,
defender,
defender_wd,
land_defender_hit_order,
100000,
true);
BattleResult result = battle.run();
double attacker_won = ((double) result.get_attacker_won())/result.get_sim_iters()*100;
double expected_result = 100*1./13;
assertTrue(attacker_won/expected_result > 1 - TOLERANCE);
assertTrue(attacker_won/expected_result < 1 + TOLERANCE);
}
public void test1InfVs1JetFighter() throws Exception {
Army attacker = new Army();
attacker.set_infantry(1);
WeaponsDevelopment attacker_wd = new WeaponsDevelopment();
Army defender = new Army();
defender.set_fighters(1);
WeaponsDevelopment defender_wd = new WeaponsDevelopment();
defender_wd.jet_fighters = true;
AsyncTask<Void, Void, Void> task = new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
return null;
}
};
LandBattleSimulation battle = new LandBattleSimulation(
task,
attacker,
attacker_wd,
land_attacker_hit_order,
defender,
defender_wd,
land_defender_hit_order,
100000,
true);
BattleResult result = battle.run();
double attacker_won = ((double) result.get_attacker_won())/result.get_sim_iters()*100;
double expected_result = 100*1./31;
assertTrue(attacker_won/expected_result > 1 - TOLERANCE);
assertTrue(attacker_won/expected_result < 1 + TOLERANCE);
}
public void test1InfVs1Bomber() throws Exception {
Army attacker = new Army();
attacker.set_infantry(1);
WeaponsDevelopment attacker_wd = new WeaponsDevelopment();
Army defender = new Army();
defender.set_bombers(1);
WeaponsDevelopment defender_wd = new WeaponsDevelopment();
AsyncTask<Void, Void, Void> task = new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
return null;
}
};
LandBattleSimulation battle = new LandBattleSimulation(
task,
attacker,
attacker_wd,
land_attacker_hit_order,
defender,
defender_wd,
land_defender_hit_order,
100000,
true);
BattleResult result = battle.run();
double attacker_won = ((double) result.get_attacker_won())/result.get_sim_iters()*100;
double expected_result = 100*5./11;
assertTrue(attacker_won/expected_result > 1 - TOLERANCE);
assertTrue(attacker_won/expected_result < 1 + TOLERANCE);
}
public void test1ArtilleryVs1Inf() throws Exception {
Army attacker = new Army();
attacker.set_artillery(1);
WeaponsDevelopment attacker_wd = new WeaponsDevelopment();
Army defender = new Army();
defender.set_infantry(1);
WeaponsDevelopment defender_wd = new WeaponsDevelopment();
AsyncTask<Void, Void, Void> task = new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
return null;
}
};
LandBattleSimulation battle = new LandBattleSimulation(
task,
attacker,
attacker_wd,
land_attacker_hit_order,
defender,
defender_wd,
land_defender_hit_order,
100000,
true);
BattleResult result = battle.run();
double attacker_won = ((double) result.get_attacker_won())/result.get_sim_iters()*100;
double expected_result = 100*.4;
assertTrue(attacker_won/expected_result > 1 - TOLERANCE);
assertTrue(attacker_won/expected_result < 1 + TOLERANCE);
}
public void test1ArtilleryVs1Artillery() throws Exception {
Army attacker = new Army();
attacker.set_artillery(1);
WeaponsDevelopment attacker_wd = new WeaponsDevelopment();
Army defender = new Army();
defender.set_artillery(1);
WeaponsDevelopment defender_wd = new WeaponsDevelopment();
AsyncTask<Void, Void, Void> task = new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
return null;
}
};
LandBattleSimulation battle = new LandBattleSimulation(
task,
attacker,
attacker_wd,
land_attacker_hit_order,
defender,
defender_wd,
land_defender_hit_order,
100000,
true);
BattleResult result = battle.run();
double attacker_won = ((double) result.get_attacker_won())/result.get_sim_iters()*100;
double expected_result = 100*.4;
assertTrue(attacker_won/expected_result > 1 - TOLERANCE);
assertTrue(attacker_won/expected_result < 1 + TOLERANCE);
}
public void test1ArtilleryVs1Tank() throws Exception {
Army attacker = new Army();
attacker.set_artillery(1);
WeaponsDevelopment attacker_wd = new WeaponsDevelopment();
Army defender = new Army();
defender.set_tanks(1);
WeaponsDevelopment defender_wd = new WeaponsDevelopment();
AsyncTask<Void, Void, Void> task = new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
return null;
}
};
LandBattleSimulation battle = new LandBattleSimulation(
task,
attacker,
attacker_wd,
land_attacker_hit_order,
defender,
defender_wd,
land_defender_hit_order,
100000,
true);
BattleResult result = battle.run();
double attacker_won = ((double) result.get_attacker_won())/result.get_sim_iters()*100;
double expected_result = 100*1./4;
assertTrue(attacker_won/expected_result > 1 - TOLERANCE);
assertTrue(attacker_won/expected_result < 1 + TOLERANCE);
}
public void test1ArtilleryVs1Fighter() throws Exception {
Army attacker = new Army();
attacker.set_artillery(1);
WeaponsDevelopment attacker_wd = new WeaponsDevelopment();
Army defender = new Army();
defender.set_fighters(1);
WeaponsDevelopment defender_wd = new WeaponsDevelopment();
AsyncTask<Void, Void, Void> task = new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
return null;
}
};
LandBattleSimulation battle = new LandBattleSimulation(
task,
attacker,
attacker_wd,
land_attacker_hit_order,
defender,
defender_wd,
land_defender_hit_order,
100000,
true);
BattleResult result = battle.run();
double attacker_won = ((double) result.get_attacker_won())/result.get_sim_iters()*100;
double expected_result = 100*1./7;
assertTrue(attacker_won/expected_result > 1 - TOLERANCE);
assertTrue(attacker_won/expected_result < 1 + TOLERANCE);
}
public void test1ArtilleryVs1JetFighter() throws Exception {
Army attacker = new Army();
attacker.set_artillery(1);
WeaponsDevelopment attacker_wd = new WeaponsDevelopment();
Army defender = new Army();
defender.set_fighters(1);
WeaponsDevelopment defender_wd = new WeaponsDevelopment();
defender_wd.jet_fighters = true;
AsyncTask<Void, Void, Void> task = new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
return null;
}
};
LandBattleSimulation battle = new LandBattleSimulation(
task,
attacker,
attacker_wd,
land_attacker_hit_order,
defender,
defender_wd,
land_defender_hit_order,
100000,
true);
BattleResult result = battle.run();
double attacker_won = ((double) result.get_attacker_won())/result.get_sim_iters()*100;
double expected_result = 100*1./16;
assertTrue(attacker_won/expected_result > 1 - TOLERANCE);
assertTrue(attacker_won/expected_result < 1 + TOLERANCE);
}
public void test1ArtilleryVs1Bomber() throws Exception {
Army attacker = new Army();
attacker.set_artillery(1);
WeaponsDevelopment attacker_wd = new WeaponsDevelopment();
Army defender = new Army();
defender.set_bombers(1);
WeaponsDevelopment defender_wd = new WeaponsDevelopment();
AsyncTask<Void, Void, Void> task = new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
return null;
}
};
LandBattleSimulation battle = new LandBattleSimulation(
task,
attacker,
attacker_wd,
land_attacker_hit_order,
defender,
defender_wd,
land_defender_hit_order,
100000,
true);
BattleResult result = battle.run();
double attacker_won = ((double) result.get_attacker_won())/result.get_sim_iters()*100;
double expected_result = 100*5./8;
assertTrue(attacker_won/expected_result > 1 - TOLERANCE);
assertTrue(attacker_won/expected_result < 1 + TOLERANCE);
}
public void test1TankVs1Inf() throws Exception {
    // Attacking force: one tank, baseline technology.
    Army attackingArmy = new Army();
    attackingArmy.set_tanks(1);
    WeaponsDevelopment attackerTech = new WeaponsDevelopment();

    // Defending force: one infantry unit, baseline technology.
    Army defendingArmy = new Army();
    defendingArmy.set_infantry(1);
    WeaponsDevelopment defenderTech = new WeaponsDevelopment();

    // The simulation API requires a host task; this one does nothing.
    AsyncTask<Void, Void, Void> noopTask = new AsyncTask<Void, Void, Void>() {
        @Override
        protected Void doInBackground(Void... params) {
            return null;
        }
    };

    BattleResult outcome = new LandBattleSimulation(noopTask,
            attackingArmy, attackerTech, land_attacker_hit_order,
            defendingArmy, defenderTech, land_defender_hit_order,
            100000, true).run();

    // Observed attacker win percentage must lie within TOLERANCE of the
    // expected 1/2 win rate for this match-up.
    double observedPct = ((double) outcome.get_attacker_won()) / outcome.get_sim_iters() * 100;
    double expectedPct = 100 * 1. / 2;
    double ratio = observedPct / expectedPct;
    assertTrue(ratio > 1 - TOLERANCE);
    assertTrue(ratio < 1 + TOLERANCE);
}
public void test1TankVs1Artillery() throws Exception {
    // Attacking force: one tank, baseline technology.
    Army attackingArmy = new Army();
    attackingArmy.set_tanks(1);
    WeaponsDevelopment attackerTech = new WeaponsDevelopment();

    // Defending force: one artillery piece, baseline technology.
    Army defendingArmy = new Army();
    defendingArmy.set_artillery(1);
    WeaponsDevelopment defenderTech = new WeaponsDevelopment();

    // The simulation API requires a host task; this one does nothing.
    AsyncTask<Void, Void, Void> noopTask = new AsyncTask<Void, Void, Void>() {
        @Override
        protected Void doInBackground(Void... params) {
            return null;
        }
    };

    BattleResult outcome = new LandBattleSimulation(noopTask,
            attackingArmy, attackerTech, land_attacker_hit_order,
            defendingArmy, defenderTech, land_defender_hit_order,
            100000, true).run();

    // Observed attacker win percentage must lie within TOLERANCE of the
    // expected 1/2 win rate for this match-up.
    double observedPct = ((double) outcome.get_attacker_won()) / outcome.get_sim_iters() * 100;
    double expectedPct = 100 * 1. / 2;
    double ratio = observedPct / expectedPct;
    assertTrue(ratio > 1 - TOLERANCE);
    assertTrue(ratio < 1 + TOLERANCE);
}
public void test1TankVs1Tank() throws Exception {
    // Symmetric match-up: one tank on each side, baseline technology.
    Army attackingArmy = new Army();
    attackingArmy.set_tanks(1);
    WeaponsDevelopment attackerTech = new WeaponsDevelopment();

    Army defendingArmy = new Army();
    defendingArmy.set_tanks(1);
    WeaponsDevelopment defenderTech = new WeaponsDevelopment();

    // The simulation API requires a host task; this one does nothing.
    AsyncTask<Void, Void, Void> noopTask = new AsyncTask<Void, Void, Void>() {
        @Override
        protected Void doInBackground(Void... params) {
            return null;
        }
    };

    BattleResult outcome = new LandBattleSimulation(noopTask,
            attackingArmy, attackerTech, land_attacker_hit_order,
            defendingArmy, defenderTech, land_defender_hit_order,
            100000, true).run();

    // Observed attacker win percentage must lie within TOLERANCE of the
    // expected 1/3 win rate for this match-up.
    double observedPct = ((double) outcome.get_attacker_won()) / outcome.get_sim_iters() * 100;
    double expectedPct = 100 * 1. / 3;
    double ratio = observedPct / expectedPct;
    assertTrue(ratio > 1 - TOLERANCE);
    assertTrue(ratio < 1 + TOLERANCE);
}
public void test1TankVs1Fighter() throws Exception {
    // Attacking force: one tank, baseline technology.
    Army attackingArmy = new Army();
    attackingArmy.set_tanks(1);
    WeaponsDevelopment attackerTech = new WeaponsDevelopment();

    // Defending force: one fighter (no jet upgrade).
    Army defendingArmy = new Army();
    defendingArmy.set_fighters(1);
    WeaponsDevelopment defenderTech = new WeaponsDevelopment();

    // The simulation API requires a host task; this one does nothing.
    AsyncTask<Void, Void, Void> noopTask = new AsyncTask<Void, Void, Void>() {
        @Override
        protected Void doInBackground(Void... params) {
            return null;
        }
    };

    BattleResult outcome = new LandBattleSimulation(noopTask,
            attackingArmy, attackerTech, land_attacker_hit_order,
            defendingArmy, defenderTech, land_defender_hit_order,
            100000, true).run();

    // Observed attacker win percentage must lie within TOLERANCE of the
    // expected 1/5 win rate for this match-up.
    double observedPct = ((double) outcome.get_attacker_won()) / outcome.get_sim_iters() * 100;
    double expectedPct = 100 * 1. / 5;
    double ratio = observedPct / expectedPct;
    assertTrue(ratio > 1 - TOLERANCE);
    assertTrue(ratio < 1 + TOLERANCE);
}
public void test1TankVs1JetFighter() throws Exception {
    // Attacking force: one tank, baseline technology.
    Army attackingArmy = new Army();
    attackingArmy.set_tanks(1);
    WeaponsDevelopment attackerTech = new WeaponsDevelopment();

    // Defending force: one fighter upgraded to a jet fighter.
    Army defendingArmy = new Army();
    defendingArmy.set_fighters(1);
    WeaponsDevelopment defenderTech = new WeaponsDevelopment();
    defenderTech.jet_fighters = true;

    // The simulation API requires a host task; this one does nothing.
    AsyncTask<Void, Void, Void> noopTask = new AsyncTask<Void, Void, Void>() {
        @Override
        protected Void doInBackground(Void... params) {
            return null;
        }
    };

    BattleResult outcome = new LandBattleSimulation(noopTask,
            attackingArmy, attackerTech, land_attacker_hit_order,
            defendingArmy, defenderTech, land_defender_hit_order,
            100000, true).run();

    // Observed attacker win percentage must lie within TOLERANCE of the
    // expected 1/11 win rate for this match-up.
    double observedPct = ((double) outcome.get_attacker_won()) / outcome.get_sim_iters() * 100;
    double expectedPct = 100 * 1. / 11;
    double ratio = observedPct / expectedPct;
    assertTrue(ratio > 1 - TOLERANCE);
    assertTrue(ratio < 1 + TOLERANCE);
}
public void test1TankVs1Bomber() throws Exception {
    // Attacking force: one tank, baseline technology.
    Army attackingArmy = new Army();
    attackingArmy.set_tanks(1);
    WeaponsDevelopment attackerTech = new WeaponsDevelopment();

    // Defending force: one bomber, baseline technology.
    Army defendingArmy = new Army();
    defendingArmy.set_bombers(1);
    WeaponsDevelopment defenderTech = new WeaponsDevelopment();

    // The simulation API requires a host task; this one does nothing.
    AsyncTask<Void, Void, Void> noopTask = new AsyncTask<Void, Void, Void>() {
        @Override
        protected Void doInBackground(Void... params) {
            return null;
        }
    };

    BattleResult outcome = new LandBattleSimulation(noopTask,
            attackingArmy, attackerTech, land_attacker_hit_order,
            defendingArmy, defenderTech, land_defender_hit_order,
            100000, true).run();

    // Observed attacker win percentage must lie within TOLERANCE of the
    // expected 5/7 win rate for this match-up.
    double observedPct = ((double) outcome.get_attacker_won()) / outcome.get_sim_iters() * 100;
    double expectedPct = 100 * 5. / 7;
    double ratio = observedPct / expectedPct;
    assertTrue(ratio > 1 - TOLERANCE);
    assertTrue(ratio < 1 + TOLERANCE);
}
/**
 * With no units on either side the simulation is expected to report an
 * attacker win rate of exactly 0%.
 */
public void testNoUnits() throws Exception {
    Army attacker = new Army();
    WeaponsDevelopment attacker_wd = new WeaponsDevelopment();
    Army defender = new Army();
    WeaponsDevelopment defender_wd = new WeaponsDevelopment();
    // No-op host task required by the simulation API.
    AsyncTask<Void, Void, Void> task = new AsyncTask<Void, Void, Void>() {
        @Override
        protected Void doInBackground(Void... params) {
            return null;
        }
    };
    LandBattleSimulation battle = new LandBattleSimulation(
            task,
            attacker,
            attacker_wd,
            land_attacker_hit_order,
            defender,
            defender_wd,
            land_defender_hit_order,
            100000,
            true);
    BattleResult result = battle.run();
    double attacker_won = ((double) result.get_attacker_won())/result.get_sim_iters()*100;
    double expected_result = 0;
    // Fix: use the (expected, actual, delta) overload. assertEquals(double, double)
    // without a delta is deprecated/ambiguous for floating point, and the original
    // call also passed the arguments in (actual, expected) order.
    assertEquals(expected_result, attacker_won, 0.0);
}
/**
 * With an attacking unit and no defenders the simulation is expected to
 * report an attacker win rate of exactly 100%.
 */
public void testNoDefender() throws Exception {
    Army attacker = new Army();
    attacker.set_tanks(1);
    WeaponsDevelopment attacker_wd = new WeaponsDevelopment();
    Army defender = new Army();
    WeaponsDevelopment defender_wd = new WeaponsDevelopment();
    // No-op host task required by the simulation API.
    AsyncTask<Void, Void, Void> task = new AsyncTask<Void, Void, Void>() {
        @Override
        protected Void doInBackground(Void... params) {
            return null;
        }
    };
    LandBattleSimulation battle = new LandBattleSimulation(
            task,
            attacker,
            attacker_wd,
            land_attacker_hit_order,
            defender,
            defender_wd,
            land_defender_hit_order,
            100000,
            true);
    BattleResult result = battle.run();
    double attacker_won = ((double) result.get_attacker_won())/result.get_sim_iters()*100;
    double expected_result = 100;
    // Fix: use the (expected, actual, delta) overload. assertEquals(double, double)
    // without a delta is deprecated/ambiguous for floating point, and the original
    // call also passed the arguments in (actual, expected) order.
    assertEquals(expected_result, attacker_won, 0.0);
}
public void testArtilleryInfSupport() throws Exception {
    // First configuration: one infantry unit supported by one artillery piece.
    Army attackingArmy = new Army();
    attackingArmy.set_infantry(1);
    attackingArmy.set_artillery(1);
    WeaponsDevelopment attackerTech = new WeaponsDevelopment();

    Army defendingArmy = new Army();
    defendingArmy.set_artillery(1);
    WeaponsDevelopment defenderTech = new WeaponsDevelopment();

    // The simulation API requires a host task; this one does nothing.
    AsyncTask<Void, Void, Void> noopTask = new AsyncTask<Void, Void, Void>() {
        @Override
        protected Void doInBackground(Void... params) {
            return null;
        }
    };

    LandBattleSimulation simulation = new LandBattleSimulation(noopTask,
            attackingArmy, attackerTech, land_attacker_hit_order,
            defendingArmy, defenderTech, land_defender_hit_order,
            100000, true);
    BattleResult outcome = simulation.run();
    double supportedWinPct = ((double) outcome.get_attacker_won()) / outcome.get_sim_iters() * 100;

    // Second configuration: replace infantry + artillery with two artillery pieces.
    attackingArmy.set_infantry(0);
    attackingArmy.set_artillery(2);
    simulation = new LandBattleSimulation(noopTask,
            attackingArmy, attackerTech, land_attacker_hit_order,
            defendingArmy, defenderTech, land_defender_hit_order,
            100000, true);
    outcome = simulation.run();
    double twoArtilleryWinPct = ((double) outcome.get_attacker_won()) / outcome.get_sim_iters() * 100;

    // Both configurations are expected to win at (statistically) the same rate.
    assertTrue(Math.abs(supportedWinPct - twoArtilleryWinPct) < TOLERANCE * 100);
}
/**
 * Transport vs. transport: the expected attacker win rate is exactly 0%.
 */
public void test1TransportVs1Transport() throws Exception {
    Army attacker = new Army();
    attacker.set_transports(1);
    WeaponsDevelopment attacker_wd = new WeaponsDevelopment();
    Army defender = new Army();
    defender.set_transports(1);
    WeaponsDevelopment defender_wd = new WeaponsDevelopment();
    // No-op host task required by the simulation API.
    AsyncTask<Void, Void, Void> task = new AsyncTask<Void, Void, Void>() {
        @Override
        protected Void doInBackground(Void... params) {
            return null;
        }
    };
    NavalBattleSimulation battle = new NavalBattleSimulation(
            task,
            attacker,
            attacker_wd,
            naval_hit_order,
            defender,
            defender_wd,
            naval_hit_order,
            100000);
    BattleResult result = battle.run();
    double attacker_won = ((double) result.get_attacker_won())/result.get_sim_iters()*100;
    double expected_result = 0.;
    // Fix: avoid '==' on doubles inside assertTrue; assertEquals with an explicit
    // zero delta performs the same exact comparison but reports the actual value
    // on failure instead of a bare "assertion failed".
    assertEquals(expected_result, attacker_won, 0.0);
}
/**
 * Transport vs. submarine: the expected attacker win rate is exactly 0%.
 */
public void test1TransportVs1Submarine() throws Exception {
    Army attacker = new Army();
    attacker.set_transports(1);
    WeaponsDevelopment attacker_wd = new WeaponsDevelopment();
    Army defender = new Army();
    defender.set_submarines(1);
    WeaponsDevelopment defender_wd = new WeaponsDevelopment();
    // No-op host task required by the simulation API.
    AsyncTask<Void, Void, Void> task = new AsyncTask<Void, Void, Void>() {
        @Override
        protected Void doInBackground(Void... params) {
            return null;
        }
    };
    NavalBattleSimulation battle = new NavalBattleSimulation(
            task,
            attacker,
            attacker_wd,
            naval_hit_order,
            defender,
            defender_wd,
            naval_hit_order,
            100000);
    BattleResult result = battle.run();
    double attacker_won = ((double) result.get_attacker_won())/result.get_sim_iters()*100;
    double expected_result = 0.;
    // Fix: avoid '==' on doubles inside assertTrue; assertEquals with an explicit
    // zero delta performs the same exact comparison with a useful failure message.
    assertEquals(expected_result, attacker_won, 0.0);
}
/**
 * Transport vs. aircraft carrier: the expected attacker win rate is exactly 0%.
 */
public void test1TransportVs1Aircraftcarrier() throws Exception {
    Army attacker = new Army();
    attacker.set_transports(1);
    WeaponsDevelopment attacker_wd = new WeaponsDevelopment();
    Army defender = new Army();
    defender.set_aircraftcarriers(1);
    WeaponsDevelopment defender_wd = new WeaponsDevelopment();
    // No-op host task required by the simulation API.
    AsyncTask<Void, Void, Void> task = new AsyncTask<Void, Void, Void>() {
        @Override
        protected Void doInBackground(Void... params) {
            return null;
        }
    };
    NavalBattleSimulation battle = new NavalBattleSimulation(
            task,
            attacker,
            attacker_wd,
            naval_hit_order,
            defender,
            defender_wd,
            naval_hit_order,
            100000);
    BattleResult result = battle.run();
    double attacker_won = ((double) result.get_attacker_won())/result.get_sim_iters()*100;
    double expected_result = 0.;
    // Fix: avoid '==' on doubles inside assertTrue; assertEquals with an explicit
    // zero delta performs the same exact comparison with a useful failure message.
    assertEquals(expected_result, attacker_won, 0.0);
}
/**
 * Transport vs. destroyer: the expected attacker win rate is exactly 0%.
 */
public void test1TransportVs1Destroyer() throws Exception {
    Army attacker = new Army();
    attacker.set_transports(1);
    WeaponsDevelopment attacker_wd = new WeaponsDevelopment();
    Army defender = new Army();
    defender.set_destroyers(1);
    WeaponsDevelopment defender_wd = new WeaponsDevelopment();
    // No-op host task required by the simulation API.
    AsyncTask<Void, Void, Void> task = new AsyncTask<Void, Void, Void>() {
        @Override
        protected Void doInBackground(Void... params) {
            return null;
        }
    };
    NavalBattleSimulation battle = new NavalBattleSimulation(
            task,
            attacker,
            attacker_wd,
            naval_hit_order,
            defender,
            defender_wd,
            naval_hit_order,
            100000);
    BattleResult result = battle.run();
    double attacker_won = ((double) result.get_attacker_won())/result.get_sim_iters()*100;
    double expected_result = 0.;
    // Fix: avoid '==' on doubles inside assertTrue; assertEquals with an explicit
    // zero delta performs the same exact comparison with a useful failure message.
    assertEquals(expected_result, attacker_won, 0.0);
}
public void test1DestroyerVs1Destroyer() throws Exception {
    // Symmetric naval match-up: one destroyer per side, baseline technology.
    Army attackingFleet = new Army();
    attackingFleet.set_destroyers(1);
    WeaponsDevelopment attackerTech = new WeaponsDevelopment();

    Army defendingFleet = new Army();
    defendingFleet.set_destroyers(1);
    WeaponsDevelopment defenderTech = new WeaponsDevelopment();

    // The simulation API requires a host task; this one does nothing.
    AsyncTask<Void, Void, Void> noopTask = new AsyncTask<Void, Void, Void>() {
        @Override
        protected Void doInBackground(Void... params) {
            return null;
        }
    };

    BattleResult outcome = new NavalBattleSimulation(noopTask,
            attackingFleet, attackerTech, naval_hit_order,
            defendingFleet, defenderTech, naval_hit_order,
            100000).run();

    // Observed attacker win percentage must lie within TOLERANCE of the
    // expected 1/3 win rate for this match-up.
    double observedPct = ((double) outcome.get_attacker_won()) / outcome.get_sim_iters() * 100;
    double expectedPct = 100 * 1. / 3;
    double ratio = observedPct / expectedPct;
    assertTrue(ratio > 1 - TOLERANCE);
    assertTrue(ratio < 1 + TOLERANCE);
}
public void test1DestroyerVs1Submarine() throws Exception {
    // Attacking fleet: one destroyer, baseline technology.
    Army attackingFleet = new Army();
    attackingFleet.set_destroyers(1);
    WeaponsDevelopment attackerTech = new WeaponsDevelopment();

    // Defending fleet: one submarine, baseline technology.
    Army defendingFleet = new Army();
    defendingFleet.set_submarines(1);
    WeaponsDevelopment defenderTech = new WeaponsDevelopment();

    // The simulation API requires a host task; this one does nothing.
    AsyncTask<Void, Void, Void> noopTask = new AsyncTask<Void, Void, Void>() {
        @Override
        protected Void doInBackground(Void... params) {
            return null;
        }
    };

    BattleResult outcome = new NavalBattleSimulation(noopTask,
            attackingFleet, attackerTech, naval_hit_order,
            defendingFleet, defenderTech, naval_hit_order,
            100000).run();

    // Observed attacker win percentage must lie within TOLERANCE of the
    // expected 1/2 win rate for this match-up.
    double observedPct = ((double) outcome.get_attacker_won()) / outcome.get_sim_iters() * 100;
    double expectedPct = 100 * 1. / 2;
    double ratio = observedPct / expectedPct;
    assertTrue(ratio > 1 - TOLERANCE);
    assertTrue(ratio < 1 + TOLERANCE);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.parquet;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.parquet.column.ColumnDescriptor;
import org.apache.parquet.column.page.PageReadStore;
import org.apache.parquet.schema.Type;
import org.apache.spark.memory.MemoryMode;
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.execution.vectorized.ColumnVectorUtils;
import org.apache.spark.sql.execution.vectorized.ColumnarBatch;
import org.apache.spark.sql.execution.vectorized.WritableColumnVector;
import org.apache.spark.sql.execution.vectorized.OffHeapColumnVector;
import org.apache.spark.sql.execution.vectorized.OnHeapColumnVector;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
/**
* A specialized RecordReader that reads into InternalRows or ColumnarBatches directly using the
* Parquet column APIs. This is somewhat based on parquet-mr's ColumnReader.
*
* TODO: handle complex types, decimal requiring more than 8 bytes, INT96. Schema mismatch.
* All of these can be handled efficiently and easily with codegen.
*
* This class can either return InternalRows or ColumnarBatches. With whole stage codegen
* enabled, this class returns ColumnarBatches which offers significant performance gains.
* TODO: make this always return ColumnarBatches.
*/
public class VectorizedParquetRecordReader extends SpecificParquetRecordReaderBase<Object> {
  /**
   * Batch of rows that we assemble and the current index we've returned. Every time this
   * batch is used up (batchIdx == numBatched), we populate the batch.
   */
  private int batchIdx = 0;
  private int numBatched = 0;

  /**
   * For each requested column, the reader to read this column. This is NULL if this column
   * is missing from the file, in which case we populate the attribute with NULL.
   */
  private VectorizedColumnReader[] columnReaders;

  /**
   * The number of rows that have been returned.
   */
  private long rowsReturned;

  /**
   * The number of rows that have been read, including the current in-flight row group.
   */
  private long totalCountLoadedSoFar = 0;

  /**
   * For each column, true if the column is missing in the file and we'll instead return NULLs.
   */
  private boolean[] missingColumns;

  /**
   * columnBatch object that is used for batch decoding. This is created on first use and triggers
   * batched decoding. It is not valid to interleave calls to the batched interface with the row
   * by row RecordReader APIs.
   * This is only enabled with additional flags for development. This is still a work in progress
   * and currently unsupported cases will fail with potentially difficult to diagnose errors.
   * This should be only turned on for development to work on this feature.
   *
   * When this is set, the code will branch early on in the RecordReader APIs. There is no shared
   * code between the path that uses the MR decoders and the vectorized ones.
   *
   * TODOs:
   *  - Implement v2 page formats (just make sure we create the correct decoders).
   */
  private ColumnarBatch columnarBatch;

  // Backing vectors of columnarBatch; indexed by column position in the batch schema.
  private WritableColumnVector[] columnVectors;

  /**
   * If true, this class returns batches instead of rows.
   */
  private boolean returnColumnarBatch;

  /**
   * The default config on whether columnarBatch should be offheap.
   */
  private static final MemoryMode DEFAULT_MEMORY_MODE = MemoryMode.ON_HEAP;

  /**
   * Implementation of RecordReader API.
   */
  @Override
  public void initialize(InputSplit inputSplit, TaskAttemptContext taskAttemptContext)
      throws IOException, InterruptedException, UnsupportedOperationException {
    super.initialize(inputSplit, taskAttemptContext);
    initializeInternal();
  }

  /**
   * Utility API that will read all the data in path. This circumvents the need to create Hadoop
   * objects to use this class. `columns` can contain the list of columns to project.
   */
  @Override
  public void initialize(String path, List<String> columns) throws IOException,
      UnsupportedOperationException {
    super.initialize(path, columns);
    initializeInternal();
  }

  @Override
  public void close() throws IOException {
    // Release the (reused) batch before closing the underlying reader.
    if (columnarBatch != null) {
      columnarBatch.close();
      columnarBatch = null;
    }
    super.close();
  }

  @Override
  public boolean nextKeyValue() throws IOException, InterruptedException {
    // Lazily creates the batch on the first call.
    resultBatch();

    // In batch mode, every "key/value" is a whole ColumnarBatch.
    if (returnColumnarBatch) return nextBatch();

    // Row mode: refill the batch once the current one is exhausted.
    if (batchIdx >= numBatched) {
      if (!nextBatch()) return false;
    }
    ++batchIdx;
    return true;
  }

  @Override
  public Object getCurrentValue() throws IOException, InterruptedException {
    if (returnColumnarBatch) return columnarBatch;
    // batchIdx was advanced in nextKeyValue(), so the current row is batchIdx - 1.
    return columnarBatch.getRow(batchIdx - 1);
  }

  @Override
  public float getProgress() throws IOException, InterruptedException {
    return (float) rowsReturned / totalRowCount;
  }

  // Creates a columnar batch that includes the schema from the data files and the additional
  // partition columns appended to the end of the batch.
  // For example, if the data contains two columns, with 2 partition columns:
  // Columns 0,1: data columns
  // Column 2: partitionValues[0]
  // Column 3: partitionValues[1]
  public void initBatch(
      MemoryMode memMode,
      StructType partitionColumns,
      InternalRow partitionValues) {
    // Batch schema = file/data columns followed by partition columns (if any).
    StructType batchSchema = new StructType();
    for (StructField f: sparkSchema.fields()) {
      batchSchema = batchSchema.add(f);
    }
    if (partitionColumns != null) {
      for (StructField f : partitionColumns.fields()) {
        batchSchema = batchSchema.add(f);
      }
    }

    int capacity = ColumnarBatch.DEFAULT_BATCH_SIZE;
    if (memMode == MemoryMode.OFF_HEAP) {
      columnVectors = OffHeapColumnVector.allocateColumns(capacity, batchSchema);
    } else {
      columnVectors = OnHeapColumnVector.allocateColumns(capacity, batchSchema);
    }
    columnarBatch = new ColumnarBatch(batchSchema, columnVectors, capacity);
    // Partition values are the same for every row in the batch, so fill them
    // once and mark the vectors constant.
    if (partitionColumns != null) {
      int partitionIdx = sparkSchema.fields().length;
      for (int i = 0; i < partitionColumns.fields().length; i++) {
        ColumnVectorUtils.populate(columnVectors[i + partitionIdx], partitionValues, i);
        columnVectors[i + partitionIdx].setIsConstant();
      }
    }

    // Initialize missing columns with nulls.
    for (int i = 0; i < missingColumns.length; i++) {
      if (missingColumns[i]) {
        columnVectors[i].putNulls(0, columnarBatch.capacity());
        columnVectors[i].setIsConstant();
      }
    }
  }

  public void initBatch() {
    initBatch(DEFAULT_MEMORY_MODE, null, null);
  }

  public void initBatch(StructType partitionColumns, InternalRow partitionValues) {
    initBatch(DEFAULT_MEMORY_MODE, partitionColumns, partitionValues);
  }

  /**
   * Returns the ColumnarBatch object that will be used for all rows returned by this reader.
   * This object is reused. Calling this enables the vectorized reader. This should be called
   * before any calls to nextKeyValue/nextBatch.
   */
  public ColumnarBatch resultBatch() {
    if (columnarBatch == null) initBatch();
    return columnarBatch;
  }

  /**
   * Can be called before any rows are returned to enable returning columnar batches directly.
   */
  public void enableReturningBatches() {
    returnColumnarBatch = true;
  }

  /**
   * Advances to the next batch of rows. Returns false if there are no more.
   */
  public boolean nextBatch() throws IOException {
    columnarBatch.reset();
    if (rowsReturned >= totalRowCount) return false;
    // Advance to the next row group if the current one is fully consumed.
    checkEndOfRowGroup();

    // Read at most a full batch, but never past the end of the current row group.
    int num = (int) Math.min((long) columnarBatch.capacity(), totalCountLoadedSoFar - rowsReturned);
    for (int i = 0; i < columnReaders.length; ++i) {
      if (columnReaders[i] == null) continue;  // missing column; vector already holds nulls
      columnReaders[i].readBatch(num, columnVectors[i]);
    }
    rowsReturned += num;

    columnarBatch.setNumRows(num);
    numBatched = num;
    batchIdx = 0;
    return true;
  }

  private void initializeInternal() throws IOException, UnsupportedOperationException {
    // Check that the requested schema is supported.
    missingColumns = new boolean[requestedSchema.getFieldCount()];
    for (int i = 0; i < requestedSchema.getFieldCount(); ++i) {
      Type t = requestedSchema.getFields().get(i);
      // Only flat primitive columns are supported by this reader.
      if (!t.isPrimitive() || t.isRepetition(Type.Repetition.REPEATED)) {
        throw new UnsupportedOperationException("Complex types not supported.");
      }

      String[] colPath = requestedSchema.getPaths().get(i);
      if (fileSchema.containsPath(colPath)) {
        ColumnDescriptor fd = fileSchema.getColumnDescription(colPath);
        // The file's column must match the requested descriptor exactly.
        if (!fd.equals(requestedSchema.getColumns().get(i))) {
          throw new UnsupportedOperationException("Schema evolution not supported.");
        }
        missingColumns[i] = false;
      } else {
        if (requestedSchema.getColumns().get(i).getMaxDefinitionLevel() == 0) {
          // Column is missing in data but the required data is non-nullable. This file is invalid.
          throw new IOException("Required column is missing in data file. Col: " +
            Arrays.toString(colPath));
        }
        missingColumns[i] = true;
      }
    }
  }

  private void checkEndOfRowGroup() throws IOException {
    if (rowsReturned != totalCountLoadedSoFar) return;
    PageReadStore pages = reader.readNextRowGroup();
    if (pages == null) {
      throw new IOException("expecting more rows but reached last block. Read "
          + rowsReturned + " out of " + totalRowCount);
    }
    // Create a fresh reader per (non-missing) column for the new row group.
    List<ColumnDescriptor> columns = requestedSchema.getColumns();
    List<Type> types = requestedSchema.asGroupType().getFields();
    columnReaders = new VectorizedColumnReader[columns.size()];
    for (int i = 0; i < columns.size(); ++i) {
      if (missingColumns[i]) continue;
      columnReaders[i] = new VectorizedColumnReader(
          columns.get(i), types.get(i).getOriginalType(), pages.getPageReader(columns.get(i)));
    }
    totalCountLoadedSoFar += pages.getRowCount();
  }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.siyeh.igtest.performance.string_concatenation_in_loops;
import java.util.*;
/**
 * Test data for the "string concatenation in loop" inspection.
 *
 * NOTE(review): the inline {@code <warning descr="...">} markers are part of the
 * fixture -- they encode exactly where the inspection is expected to highlight.
 * Do not "fix" the concatenations or reformat the highlighted expressions; that
 * would change the expected inspection output.
 */
public class StringConcatenationInLoop
{
  public StringConcatenationInLoop()
  {
  }

  public String foo()
  {
    String foo = "";
    for(int i = 0; i < 5; i++)
    {
      (foo) = ((foo) <warning descr="String concatenation '+' in loop">+</warning> (" ") + (i));
      foo += "abc"; // only first concatenation in the loop is reported for given variable
    }
    for(int i = 0; i < 5; i++)
    {
      foo <warning descr="String concatenation '+=' in loop">+=</warning> foo + " " + i;
    }
    for(int i = 0; i < 5; i++)
    {
      baz( foo + " " + i);
    }
    for(int i = 0; i < 5; i++)
    {
      if(bar())
      {
        return baz(("foo" + "bar"));
      }
    }
    for(int i = 0; i < 5; i++)
    {
      if(bar())
      {
        throw new Error("foo" + i);
      }
    }
    String s = "";
    for(int i = 0; i < 5; i++) {
      if(i > 2) {
        s += i;
        {
          System.out.println(s);
          break;
        }
      }
    }
    for(int i = 0; i < 5; i++) {
      if(i > 2) {
        s += i;
        {
          System.out.println(s);
          {
            break;
          }
        }
      }
    }
    for(int i = 0; i < 5; i++) {
      if(i > 2) {
        s += i;
        {
          {
            System.out.println(s);
          }
          break;
        }
      }
    }
    for(int i = 0; i < 5; i++) {
      if(i > 2) {
        s <warning descr="String concatenation '+=' in loop">+=</warning> i;
        {
          System.out.println(s);
        }
        System.out.println(s);
      }
    }
    for (int i = 0; i < 10; i++) {
      s = (s == "") + "...";
    }
    for (int i = 0; i < 10; i++) {
      s = ("xyz" <warning descr="String concatenation '+' in loop">+</warning> (i <warning descr="String concatenation '+' in loop">+</warning> s)) <warning descr="String concatenation '+' in loop">+</warning> "...";
    }
    System.out.println(foo);
    return foo;
  }

  void test(String s) {
    for(int i=0; i<10; i++) {
      if(s != null) {
        if(s.equals("xyz")) {
          s += "xyz";
        }
        break;
      }
    }
  }

  void test2(String s, int flag) {
    for(int i=0; i<10; i++) {
      if(flag == 0) {
        s<warning descr="String concatenation '+=' in loop">+=</warning>"xyz";
        continue; // continue doesn't matters: we're still iterating in loop
      }
      System.out.println("oops");
    }
  }

  void test3(String s, int flag) {
    for(int i=0; i<10; i++) {
      for(int j=0; j<10; j++) {
        if(flag == 0) {
          s<warning descr="String concatenation '+=' in loop">+=</warning>"xyz";
          break; // breaking inner loop only: still concatenation in loop
        }
      }
    }
  }

  void test4(String s, int flag) {
    for(int i=0; i<10; i++) {
      s = "x";
      for(int j=0; j<10; j++) {
        if(flag == 0) {
          s+="xyz";
          break; // breaking inner loop only, but variable is always reassigned in outer loop, so effectively concatenating once
        }
      }
      System.out.println(s);
    }
  }

  void testLabel(String s, int flag) {
    OUTER:
    for(int i=0; i<10; i++) {
      for(int j=0; j<10; j++) {
        if(flag == 0) {
          s+="xyz";
          break OUTER; // breaking outer loop: ok
        }
      }
    }
  }

  void testDefinedInLoop(List<?> list) {
    for(Object obj : list) {
      String s = "message";
      if(obj != null) {
        s+=obj; // replacing with StringBuilder won't change anything
      }
      System.out.println(s);
    }
  }

  public void testSwitch(int flag) {
    String s = "";
    for(int i=0; i<10; i++) {
      if(i > 5) {
        if(s != null) {
          switch(flag) {
            case 0:
              s += "xyz";
              break; // break switch, then break loop: effectively single concatenation
            case 1:
              s <warning descr="String concatenation '+=' in loop">+=</warning> "abc"; // fall-through
            case 2:
              s <warning descr="String concatenation '+=' in loop">+=</warning> "efg"; // continue loop: possibly multiple concatenations
              continue;
          }
        }
        break;
      }
    }
    System.out.println(s);
  }

  private boolean bar()
  {
    return true;
  }

  private String baz(String s)
  {
    return s;
  }

  public void oper() {
    final String[] array = new String[] { "a", "a", "a" };
    String s = "asdf";
    final int len = array.length;
    for (int k = 0; k < len; k++) {
      array[k] += "b";
      s <warning descr="String concatenation '+=' in loop">+=</warning> k;
    }
  }

  void bla() {
    while (true) {
      System.out.println("a" + "b" + "c");
    }
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.devtools.j2objc.translate;
import com.google.common.base.Joiner;
import com.google.common.base.Strings;
import com.google.common.collect.Iterables;
import com.google.devtools.j2objc.ast.AbstractTypeDeclaration;
import com.google.devtools.j2objc.ast.Annotation;
import com.google.devtools.j2objc.ast.AnnotationTypeDeclaration;
import com.google.devtools.j2objc.ast.AnnotationTypeMemberDeclaration;
import com.google.devtools.j2objc.ast.Block;
import com.google.devtools.j2objc.ast.BodyDeclaration;
import com.google.devtools.j2objc.ast.CompilationUnit;
import com.google.devtools.j2objc.ast.EnumConstantDeclaration;
import com.google.devtools.j2objc.ast.EnumDeclaration;
import com.google.devtools.j2objc.ast.Expression;
import com.google.devtools.j2objc.ast.FieldDeclaration;
import com.google.devtools.j2objc.ast.FunctionDeclaration;
import com.google.devtools.j2objc.ast.MethodDeclaration;
import com.google.devtools.j2objc.ast.NativeExpression;
import com.google.devtools.j2objc.ast.NativeStatement;
import com.google.devtools.j2objc.ast.ReturnStatement;
import com.google.devtools.j2objc.ast.SingleVariableDeclaration;
import com.google.devtools.j2objc.ast.Statement;
import com.google.devtools.j2objc.ast.TreeUtil;
import com.google.devtools.j2objc.ast.TypeDeclaration;
import com.google.devtools.j2objc.ast.UnitTreeVisitor;
import com.google.devtools.j2objc.ast.VariableDeclarationFragment;
import com.google.devtools.j2objc.types.GeneratedExecutableElement;
import com.google.devtools.j2objc.types.GeneratedTypeElement;
import com.google.devtools.j2objc.types.NativeType;
import com.google.devtools.j2objc.util.CodeReferenceMap;
import com.google.devtools.j2objc.util.ElementUtil;
import com.google.devtools.j2objc.util.ErrorUtil;
import com.google.devtools.j2objc.util.TypeUtil;
import com.google.devtools.j2objc.util.UnicodeUtils;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import javax.lang.model.element.Element;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.ArrayType;
import javax.lang.model.type.TypeMirror;
/**
* Adds the __metadata method to classes to support reflection.
*/
public class MetadataWriter extends UnitTreeVisitor {
// Metadata structure version. Increment it when any structure changes are made.
public static final int METADATA_VERSION = 7;
private static final NativeType CLASS_INFO_TYPE = new NativeType("const J2ObjcClassInfo *");
private final ArrayType annotationArray;
private final ArrayType annotationArray2D;
public MetadataWriter(CompilationUnit unit, CodeReferenceMap deadCodeMap) {
    super(unit);
    // Build the element type for annotation metadata, then derive the 1-D and
    // 2-D array types from it.
    // NOTE(review): deadCodeMap is not used in this constructor -- presumably
    // kept for signature compatibility with the translator pipeline; confirm.
    TypeMirror annotationMirror =
        GeneratedTypeElement.newEmulatedInterface("java.lang.annotation.Annotation").asType();
    annotationArray = typeUtil.getArrayType(annotationMirror);
    annotationArray2D = typeUtil.getArrayType(annotationArray);
}
// All three top-level type forms (classes/interfaces, enums, annotation types)
// funnel into the shared visitType() handler below.
@Override
public void endVisit(TypeDeclaration node) {
    visitType(node);
}

@Override
public void endVisit(EnumDeclaration node) {
    visitType(node);
}

@Override
public void endVisit(AnnotationTypeDeclaration node) {
    visitType(node);
}
/**
 * Adds a private static {@code __metadata} method to the given type declaration,
 * whose body is generated by {@link MetadataGenerator}. Dead classes and types
 * that do not need reflection support are left untouched.
 */
private void visitType(AbstractTypeDeclaration node) {
    if (node.isDeadClass()) {
        return;
    }
    TypeElement typeElement = node.getTypeElement();
    if (!translationUtil.needsReflection(typeElement)) {
        return;
    }
    // Declare: private static const J2ObjcClassInfo *__metadata();
    ExecutableElement metadataMethod = GeneratedExecutableElement
        .newMethodWithSelector("__metadata", CLASS_INFO_TYPE, typeElement)
        .addModifiers(Modifier.STATIC, Modifier.PRIVATE);
    MethodDeclaration declaration = new MethodDeclaration(metadataMethod);
    declaration.setHasDeclaration(false);
    Block methodBody = new Block();
    declaration.setBody(methodBody);
    // Emit the metadata statements directly into the new method's body.
    new MetadataGenerator(node, methodBody.getStatements()).generateClassMetadata();
    node.addBodyDeclaration(declaration);
}
/**
* Generates the metadata contents for a single type.
*/
private class MetadataGenerator {
private final AbstractTypeDeclaration typeNode;
private final TypeElement type;
private final String className;
private final List<Statement> stmts;
// Use a LinkedHashMap so that we can de-dupe values that are added to the pointer table.
private final LinkedHashMap<String, Integer> pointers = new LinkedHashMap<>();
private int annotationFuncCount = 0;
private MetadataGenerator(AbstractTypeDeclaration typeNode, List<Statement> stmts) {
this.typeNode = typeNode;
type = typeNode.getTypeElement();
className = nameTable.getFullName(type);
this.stmts = stmts;
}
private void generateClassMetadata() {
String fullName = nameTable.getFullName(type);
int methodMetadataCount = generateMethodsMetadata();
int fieldMetadataCount = generateFieldsMetadata();
String annotationsFunc = createAnnotationsFunction(typeNode);
String metadata = UnicodeUtils.format(
"static const J2ObjcClassInfo _%s = { "
+ "%s, %s, %%s, %s, %s, %d, 0x%x, %d, %d, %s, %s, %s, %s, %s };",
fullName,
cStr(ElementUtil.isAnonymous(type) ? "" : ElementUtil.getName(type)),
cStr(Strings.emptyToNull(ElementUtil.getName(ElementUtil.getPackage(type)))),
methodMetadataCount > 0 ? "methods" : "NULL",
fieldMetadataCount > 0 ? "fields" : "NULL",
METADATA_VERSION,
getTypeModifiers(type),
methodMetadataCount,
fieldMetadataCount,
cStrIdx(getTypeName(ElementUtil.getDeclaringClass(type))),
cStrIdx(getTypeList(ElementUtil.asTypes(ElementUtil.getDeclaredTypes(type)))),
cStrIdx(getEnclosingMethodSelector()),
cStrIdx(signatureGenerator.createClassSignature(type)),
funcPtrIdx(annotationsFunc));
// Add the pointer table in a second format pass since it's value is dependent on all other
// values.
metadata = UnicodeUtils.format(metadata, getPtrTableEntry());
stmts.add(new NativeStatement(metadata));
stmts.add(new ReturnStatement(new NativeExpression("&_" + fullName, CLASS_INFO_TYPE)));
}
private String getPtrTableEntry() {
if (pointers.isEmpty()) {
return "NULL";
}
if (pointers.size() > Short.MAX_VALUE) {
// Note that values greater that 2^15 and less than 2^16 will not result in a compile
// error even though the index type is declared as signed.
// This limit is more restrictive than existing limits on number of methods and fields
// imposed by the JVM class file format, which allows up to 2^16 each of methods and
// fields. Our limit is a few times smaller than Java's since we use a signed index, share
// the table between methods and fields, and have multiple entries for each method or
// field that can index into the table. See JVMS-4.11.
ErrorUtil.error(typeNode, "Too many metadata entries causing overflow.");
}
stmts.add(new NativeStatement(
"static const void *ptrTable[] = { " + Joiner.on(", ").join(pointers.keySet()) + " };"));
return "ptrTable";
}
private int generateMethodsMetadata() {
List<String> methodMetadata = new ArrayList<>();
List<String> selectorMetadata = new ArrayList<>();
int methodCount = 0;
for (MethodDeclaration decl : TreeUtil.getMethodDeclarations(typeNode)) {
ExecutableElement element = decl.getExecutableElement();
// Skip synthetic methods and enum constructors.
if (ElementUtil.isSynthetic(element)
|| (ElementUtil.isEnum(type) && ElementUtil.isConstructor(element))) {
continue;
}
String annotationsFunc = createAnnotationsFunction(decl);
String paramAnnotationsFunc = createParamAnnotationsFunction(decl);
methodMetadata.add(getMethodMetadata(element, annotationsFunc, paramAnnotationsFunc));
String selector = nameTable.getMethodSelector(element);
String metadata = UnicodeUtils.format("methods[%d].selector = @selector(%s);",
methodCount, selector);
++methodCount;
selectorMetadata.add(metadata);
}
if (typeNode instanceof AnnotationTypeDeclaration) {
// Add property accessor and static default methods.
for (AnnotationTypeMemberDeclaration decl : TreeUtil.getAnnotationMembers(typeNode)) {
String name = ElementUtil.getName(decl.getExecutableElement());
String returnType = getTypeName(decl.getExecutableElement().getReturnType());
String metadata = UnicodeUtils.format(" { NULL, %s, 0x%x, -1, -1, -1, -1, -1, -1 },\n",
cStr(returnType),
java.lang.reflect.Modifier.PUBLIC | java.lang.reflect.Modifier.ABSTRACT);
methodMetadata.add(metadata);
metadata = UnicodeUtils.format("methods[%d].selector = @selector(%s);",
methodCount, name);
++methodCount;
selectorMetadata.add(metadata);
}
}
if (methodMetadata.size() > 0) {
StringBuilder sb = new StringBuilder("static J2ObjcMethodInfo methods[] = {\n");
for (String metadata : methodMetadata) {
sb.append(metadata);
}
sb.append(" };");
stmts.add(new NativeStatement(sb.toString()));
stmts.add(new NativeStatement("#pragma clang diagnostic push"));
stmts.add(new NativeStatement(
"#pragma clang diagnostic ignored \"-Wobjc-multiple-method-names\""));
stmts.add(new NativeStatement(
"#pragma clang diagnostic ignored \"-Wundeclared-selector\""));
for (String selector : selectorMetadata) {
stmts.add(new NativeStatement(selector));
}
stmts.add(new NativeStatement("#pragma clang diagnostic pop"));
}
return methodMetadata.size();
}
private String getMethodMetadata(
ExecutableElement method, String annotationsFunc, String paramAnnotationsFunc) {
String methodName = ElementUtil.getName(method);
String selector = nameTable.getMethodSelector(method);
boolean isConstructor = ElementUtil.isConstructor(method);
if (selector.equals(methodName) || isConstructor) {
methodName = null; // Reduce redundant data.
}
int modifiers = getMethodModifiers(method) & ElementUtil.ACC_FLAG_MASK;
String returnTypeStr = isConstructor ? null : getTypeName(method.getReturnType());
return UnicodeUtils.format(" { NULL, %s, 0x%x, %s, %s, %s, %s, %s, %s },\n",
cStr(returnTypeStr), modifiers, cStrIdx(methodName),
cStrIdx(getTypeList(ElementUtil.asTypes(method.getParameters()))),
cStrIdx(getTypeList(method.getThrownTypes())),
cStrIdx(signatureGenerator.createMethodTypeSignature(method)),
funcPtrIdx(annotationsFunc), funcPtrIdx(paramAnnotationsFunc));
}
private int generateFieldsMetadata() {
List<String> fieldMetadata = new ArrayList<>();
if (typeNode instanceof EnumDeclaration) {
for (EnumConstantDeclaration decl : ((EnumDeclaration) typeNode).getEnumConstants()) {
String annotationsFunc = createAnnotationsFunction(decl);
fieldMetadata.add(generateFieldMetadata(decl.getVariableElement(), annotationsFunc));
}
}
for (FieldDeclaration decl : TreeUtil.getFieldDeclarations(typeNode)) {
// Fields that share a declaration can share an annotations function.
String annotationsFunc = createAnnotationsFunction(decl);
for (VariableDeclarationFragment f : decl.getFragments()) {
String metadata = generateFieldMetadata(f.getVariableElement(), annotationsFunc);
if (metadata != null) {
fieldMetadata.add(metadata);
}
}
}
if (fieldMetadata.size() > 0) {
StringBuilder sb = new StringBuilder("static const J2ObjcFieldInfo fields[] = {\n");
for (String metadata : fieldMetadata) {
sb.append(metadata);
}
sb.append(" };");
stmts.add(new NativeStatement(sb.toString()));
}
return fieldMetadata.size();
}
private String generateFieldMetadata(VariableElement var, String annotationsFunc) {
int modifiers = getFieldModifiers(var);
boolean isStatic = ElementUtil.isStatic(var);
String javaName = ElementUtil.getName(var);
String objcName = nameTable.getVariableShortName(var);
if ((isStatic && objcName.equals(javaName))
|| (!isStatic && objcName.equals(javaName + '_'))) {
// Don't print Java name if it matches the default pattern, to conserve space.
javaName = null;
}
if ((ElementUtil.isEnumConstant(var) && options.stripEnumConstants())) {
objcName = null;
}
String staticRef = null;
String constantValue;
if (ElementUtil.isPrimitiveConstant(var)) {
constantValue = UnicodeUtils.format(".constantValue.%s = %s",
getRawValueField(var), nameTable.getVariableQualifiedName(var));
} else {
// Explicit 0-initializer to avoid Clang warning.
constantValue = ".constantValue.asLong = 0";
if (isStatic) {
staticRef = nameTable.getVariableQualifiedName(var);
}
}
return UnicodeUtils.format(
" { %s, %s, %s, 0x%x, %s, %s, %s, %s },\n",
cStr(objcName), cStr(getTypeName(var.asType())), constantValue, modifiers,
cStrIdx(javaName), addressOfIdx(staticRef),
cStrIdx(signatureGenerator.createFieldTypeSignature(var)), funcPtrIdx(annotationsFunc));
}
private String getEnclosingMethodSelector() {
Element enclosing = type.getEnclosingElement();
return ElementUtil.isExecutableElement(enclosing)
? nameTable.getMethodSelector((ExecutableElement) enclosing) : null;
}
private String cStrIdx(String str) {
return getPointerIdx(str != null ? "\"" + str + "\"" : null);
}
private String addressOfIdx(String name) {
return getPointerIdx(name != null ? "&" + name : null);
}
// Same as addressOfIdx, but adds a (void *) cast to satisfy c++ compilers.
private String funcPtrIdx(String name) {
return getPointerIdx(name != null ? "(void *)&" + name : null);
}
private String getPointerIdx(String ptr) {
if (ptr == null) {
return "-1";
}
Integer idx = pointers.get(ptr);
if (idx == null) {
idx = pointers.size();
pointers.put(ptr, idx);
}
return idx.toString();
}
/**
* Generate a function that returns the annotations for a BodyDeclarations node.
*/
private String createAnnotationsFunction(BodyDeclaration decl) {
List<Annotation> runtimeAnnotations =
TreeUtil.getRuntimeAnnotationsList(decl.getAnnotations());
if (runtimeAnnotations.isEmpty()) {
return null;
}
return addAnnotationsFunction(createAnnotations(runtimeAnnotations));
}
/**
* Generate a function that returns the 2-dimentional array of annotations for method
* parameters.
*/
private String createParamAnnotationsFunction(MethodDeclaration method) {
List<SingleVariableDeclaration> params = method.getParameters();
// Quick test to see if there are any parameter annotations.
boolean hasAnnotations = false;
for (SingleVariableDeclaration param : params) {
if (!Iterables.isEmpty(TreeUtil.getRuntimeAnnotations(param.getAnnotations()))) {
hasAnnotations = true;
break;
}
}
if (!hasAnnotations) {
return null;
}
List<Expression> subArrays = new ArrayList<>();
for (SingleVariableDeclaration param : params) {
subArrays.add(createAnnotations(
TreeUtil.getRuntimeAnnotationsList(param.getAnnotations())));
}
return addAnnotationsFunction(
translationUtil.createObjectArray(subArrays, annotationArray2D));
}
private String addAnnotationsFunction(Expression result) {
String name = className + "__Annotations$" + annotationFuncCount++;
FunctionDeclaration decl = new FunctionDeclaration(name, result.getTypeMirror());
decl.addModifiers(java.lang.reflect.Modifier.PRIVATE);
Block body = new Block();
decl.setBody(body);
body.addStatement(new ReturnStatement(result));
typeNode.addBodyDeclaration(decl);
return name;
}
}
private Expression createAnnotations(List<Annotation> annotations) {
List<Expression> expressions = new ArrayList<>();
for (Annotation annotation : annotations) {
expressions.add(translationUtil.createAnnotation(annotation.getAnnotationMirror()));
}
return translationUtil.createObjectArray(expressions, annotationArray);
}
private static String getRawValueField(VariableElement var) {
switch (var.asType().getKind()) {
case BOOLEAN: return "asBOOL";
case BYTE: return "asChar";
case CHAR: return "asUnichar";
case DOUBLE: return "asDouble";
case FLOAT: return "asFloat";
case INT: return "asInt";
case LONG: return "asLong";
case SHORT: return "asShort";
default: throw new AssertionError("Expected a primitive type.");
}
}
private String getTypeName(TypeMirror type) {
if (type == null) {
return null;
}
type = typeUtil.erasure(type);
if (TypeUtil.isDeclaredType(type)) {
return getTypeName(TypeUtil.asTypeElement(type));
} else if (TypeUtil.isArray(type)) {
return "[" + getTypeName(((ArrayType) type).getComponentType());
} else {
return TypeUtil.getBinaryName(type);
}
}
private String getTypeName(TypeElement type) {
return type == null ? null : "L" + nameTable.getFullName(type) + ";";
}
private String getTypeList(Iterable<? extends TypeMirror> types) {
if (Iterables.isEmpty(types)) {
return null;
}
StringBuilder sb = new StringBuilder();
for (TypeMirror type : types) {
sb.append(getTypeName(type));
}
return sb.toString();
}
/**
* Returns the modifiers for a specified type, including internal ones.
* All class modifiers are defined in the JVM specification, table 4.1.
*/
private static int getTypeModifiers(TypeElement type) {
int modifiers = ElementUtil.fromModifierSet(type.getModifiers());
if (type.getKind().isInterface()) {
modifiers |= java.lang.reflect.Modifier.INTERFACE | java.lang.reflect.Modifier.ABSTRACT
| java.lang.reflect.Modifier.STATIC;
}
if (ElementUtil.isSynthetic(type)) {
modifiers |= ElementUtil.ACC_SYNTHETIC;
}
if (ElementUtil.isAnnotationType(type)) {
modifiers |= ElementUtil.ACC_ANNOTATION;
}
if (ElementUtil.isEnum(type)) {
modifiers |= ElementUtil.ACC_ENUM;
}
if (ElementUtil.isAnonymous(type)) {
modifiers |= ElementUtil.ACC_ANONYMOUS;
}
return modifiers;
}
/**
* Returns the modifiers for a specified method, including internal ones.
* All method modifiers are defined in the JVM specification, table 4.5.
*/
private static int getMethodModifiers(ExecutableElement method) {
int modifiers = ElementUtil.fromModifierSet(method.getModifiers());
if (method.isVarArgs()) {
modifiers |= ElementUtil.ACC_VARARGS;
}
if (ElementUtil.isSynthetic(method)) {
modifiers |= ElementUtil.ACC_SYNTHETIC;
}
return modifiers;
}
/**
* Returns the modifiers for a specified field, including internal ones.
* All method modifiers are defined in the JVM specification, table 4.4.
*/
private static int getFieldModifiers(VariableElement var) {
int modifiers = ElementUtil.fromModifierSet(var.getModifiers());
if (ElementUtil.isSynthetic(var)) {
modifiers |= ElementUtil.ACC_SYNTHETIC;
}
if (ElementUtil.isEnumConstant(var)) {
modifiers |= ElementUtil.ACC_ENUM;
}
return modifiers;
}
private String cStr(String s) {
return s == null ? "NULL" : "\"" + s + "\"";
}
}
| |
/* Generated By:JavaCC: Do not edit this line. JavaCharStream.java Version 5.0 */
/* JavaCCOptions:STATIC=false,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */
package org.apache.jena.sparql.lang.sparql_11 ;
/**
* An implementation of interface CharStream, where the stream is assumed to
* contain only ASCII characters (with java-like unicode escape processing).
*/
public
class JavaCharStream
{
/** Whether parser is static. */
public static final boolean staticFlag = false;
static final int hexval(char c) throws java.io.IOException {
switch(c)
{
case '0' :
return 0;
case '1' :
return 1;
case '2' :
return 2;
case '3' :
return 3;
case '4' :
return 4;
case '5' :
return 5;
case '6' :
return 6;
case '7' :
return 7;
case '8' :
return 8;
case '9' :
return 9;
case 'a' :
case 'A' :
return 10;
case 'b' :
case 'B' :
return 11;
case 'c' :
case 'C' :
return 12;
case 'd' :
case 'D' :
return 13;
case 'e' :
case 'E' :
return 14;
case 'f' :
case 'F' :
return 15;
}
throw new java.io.IOException(); // Should never come here
}
/** Position in buffer. */
public int bufpos = -1;
int bufsize;
int available;
int tokenBegin;
protected int bufline[];
protected int bufcolumn[];
protected int column = 0;
protected int line = 1;
protected boolean prevCharIsCR = false;
protected boolean prevCharIsLF = false;
protected java.io.Reader inputStream;
protected char[] nextCharBuf;
protected char[] buffer;
protected int maxNextCharInd = 0;
protected int nextCharInd = -1;
protected int inBuf = 0;
protected int tabSize = 8;
protected void setTabSize(int i) { tabSize = i; }
protected int getTabSize(int i) { return tabSize; }
protected void ExpandBuff(boolean wrapAround)
{
char[] newbuffer = new char[bufsize + 2048];
int newbufline[] = new int[bufsize + 2048];
int newbufcolumn[] = new int[bufsize + 2048];
try
{
if (wrapAround)
{
System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
System.arraycopy(buffer, 0, newbuffer, bufsize - tokenBegin, bufpos);
buffer = newbuffer;
System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin);
System.arraycopy(bufline, 0, newbufline, bufsize - tokenBegin, bufpos);
bufline = newbufline;
System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin);
System.arraycopy(bufcolumn, 0, newbufcolumn, bufsize - tokenBegin, bufpos);
bufcolumn = newbufcolumn;
bufpos += (bufsize - tokenBegin);
}
else
{
System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
buffer = newbuffer;
System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin);
bufline = newbufline;
System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin);
bufcolumn = newbufcolumn;
bufpos -= tokenBegin;
}
}
catch (Throwable t)
{
throw new Error(t.getMessage());
}
available = (bufsize += 2048);
tokenBegin = 0;
}
protected void FillBuff() throws java.io.IOException
{
int i;
if (maxNextCharInd == 4096)
maxNextCharInd = nextCharInd = 0;
try {
if ((i = inputStream.read(nextCharBuf, maxNextCharInd,
4096 - maxNextCharInd)) == -1)
{
inputStream.close();
throw new java.io.IOException();
}
else
maxNextCharInd += i;
return;
}
catch(java.io.IOException e) {
if (bufpos != 0)
{
--bufpos;
backup(0);
}
else
{
bufline[bufpos] = line;
bufcolumn[bufpos] = column;
}
throw e;
}
}
protected char ReadByte() throws java.io.IOException
{
if (++nextCharInd >= maxNextCharInd)
FillBuff();
return nextCharBuf[nextCharInd];
}
/** @return starting character for token. */
public char BeginToken() throws java.io.IOException
{
if (inBuf > 0)
{
--inBuf;
if (++bufpos == bufsize)
bufpos = 0;
tokenBegin = bufpos;
return buffer[bufpos];
}
tokenBegin = 0;
bufpos = -1;
return readChar();
}
protected void AdjustBuffSize()
{
if (available == bufsize)
{
if (tokenBegin > 2048)
{
bufpos = 0;
available = tokenBegin;
}
else
ExpandBuff(false);
}
else if (available > tokenBegin)
available = bufsize;
else if ((tokenBegin - available) < 2048)
ExpandBuff(true);
else
available = tokenBegin;
}
protected void UpdateLineColumn(char c)
{
column++;
if (prevCharIsLF)
{
prevCharIsLF = false;
line += (column = 1);
}
else if (prevCharIsCR)
{
prevCharIsCR = false;
if (c == '\n')
{
prevCharIsLF = true;
}
else
line += (column = 1);
}
switch (c)
{
case '\r' :
prevCharIsCR = true;
break;
case '\n' :
prevCharIsLF = true;
break;
case '\t' :
column--;
column += (tabSize - (column % tabSize));
break;
default :
break;
}
bufline[bufpos] = line;
bufcolumn[bufpos] = column;
}
/** Read a character. */
public char readChar() throws java.io.IOException
{
if (inBuf > 0)
{
--inBuf;
if (++bufpos == bufsize)
bufpos = 0;
return buffer[bufpos];
}
char c;
if (++bufpos == available)
AdjustBuffSize();
if ((buffer[bufpos] = c = ReadByte()) == '\\')
{
UpdateLineColumn(c);
int backSlashCnt = 1;
for (;;) // Read all the backslashes
{
if (++bufpos == available)
AdjustBuffSize();
try
{
if ((buffer[bufpos] = c = ReadByte()) != '\\')
{
UpdateLineColumn(c);
// found a non-backslash char.
if ((c == 'u') && ((backSlashCnt & 1) == 1))
{
if (--bufpos < 0)
bufpos = bufsize - 1;
break;
}
backup(backSlashCnt);
return '\\';
}
}
catch(java.io.IOException e)
{
// We are returning one backslash so we should only backup (count-1)
if (backSlashCnt > 1)
backup(backSlashCnt-1);
return '\\';
}
UpdateLineColumn(c);
backSlashCnt++;
}
// Here, we have seen an odd number of backslash's followed by a 'u'
try
{
while ((c = ReadByte()) == 'u')
++column;
buffer[bufpos] = c = (char)(hexval(c) << 12 |
hexval(ReadByte()) << 8 |
hexval(ReadByte()) << 4 |
hexval(ReadByte()));
column += 4;
}
catch(java.io.IOException e)
{
throw new Error("Invalid escape character at line " + line +
" column " + column + ".");
}
if (backSlashCnt == 1)
return c;
else
{
backup(backSlashCnt - 1);
return '\\';
}
}
else
{
UpdateLineColumn(c);
return c;
}
}
@Deprecated
/**
* @deprecated
* @see #getEndColumn
*/
public int getColumn() {
return bufcolumn[bufpos];
}
@Deprecated
/**
* @deprecated
* @see #getEndLine
*/
public int getLine() {
return bufline[bufpos];
}
/** Get end column. */
public int getEndColumn() {
return bufcolumn[bufpos];
}
/** Get end line. */
public int getEndLine() {
return bufline[bufpos];
}
/** @return column of token start */
public int getBeginColumn() {
return bufcolumn[tokenBegin];
}
/** @return line number of token start */
public int getBeginLine() {
return bufline[tokenBegin];
}
/** Retreat. */
public void backup(int amount) {
inBuf += amount;
if ((bufpos -= amount) < 0)
bufpos += bufsize;
}
/** Constructor. */
public JavaCharStream(java.io.Reader dstream,
int startline, int startcolumn, int buffersize)
{
inputStream = dstream;
line = startline;
column = startcolumn - 1;
available = bufsize = buffersize;
buffer = new char[buffersize];
bufline = new int[buffersize];
bufcolumn = new int[buffersize];
nextCharBuf = new char[4096];
}
/** Constructor. */
public JavaCharStream(java.io.Reader dstream,
int startline, int startcolumn)
{
this(dstream, startline, startcolumn, 4096);
}
/** Constructor. */
public JavaCharStream(java.io.Reader dstream)
{
this(dstream, 1, 1, 4096);
}
/** Reinitialise. */
public void ReInit(java.io.Reader dstream,
int startline, int startcolumn, int buffersize)
{
inputStream = dstream;
line = startline;
column = startcolumn - 1;
if (buffer == null || buffersize != buffer.length)
{
available = bufsize = buffersize;
buffer = new char[buffersize];
bufline = new int[buffersize];
bufcolumn = new int[buffersize];
nextCharBuf = new char[4096];
}
prevCharIsLF = prevCharIsCR = false;
tokenBegin = inBuf = maxNextCharInd = 0;
nextCharInd = bufpos = -1;
}
/** Reinitialise. */
public void ReInit(java.io.Reader dstream,
int startline, int startcolumn)
{
ReInit(dstream, startline, startcolumn, 4096);
}
/** Reinitialise. */
public void ReInit(java.io.Reader dstream)
{
ReInit(dstream, 1, 1, 4096);
}
/** Constructor. */
public JavaCharStream(java.io.InputStream dstream, String encoding, int startline,
int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException
{
this(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize);
}
/** Constructor. */
public JavaCharStream(java.io.InputStream dstream, int startline,
int startcolumn, int buffersize)
{
this(new java.io.InputStreamReader(dstream), startline, startcolumn, 4096);
}
/** Constructor. */
public JavaCharStream(java.io.InputStream dstream, String encoding, int startline,
int startcolumn) throws java.io.UnsupportedEncodingException
{
this(dstream, encoding, startline, startcolumn, 4096);
}
/** Constructor. */
public JavaCharStream(java.io.InputStream dstream, int startline,
int startcolumn)
{
this(dstream, startline, startcolumn, 4096);
}
/** Constructor. */
public JavaCharStream(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException
{
this(dstream, encoding, 1, 1, 4096);
}
/** Constructor. */
public JavaCharStream(java.io.InputStream dstream)
{
this(dstream, 1, 1, 4096);
}
/** Reinitialise. */
public void ReInit(java.io.InputStream dstream, String encoding, int startline,
int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException
{
ReInit(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize);
}
/** Reinitialise. */
public void ReInit(java.io.InputStream dstream, int startline,
int startcolumn, int buffersize)
{
ReInit(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize);
}
/** Reinitialise. */
public void ReInit(java.io.InputStream dstream, String encoding, int startline,
int startcolumn) throws java.io.UnsupportedEncodingException
{
ReInit(dstream, encoding, startline, startcolumn, 4096);
}
/** Reinitialise. */
public void ReInit(java.io.InputStream dstream, int startline,
int startcolumn)
{
ReInit(dstream, startline, startcolumn, 4096);
}
/** Reinitialise. */
public void ReInit(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException
{
ReInit(dstream, encoding, 1, 1, 4096);
}
/** Reinitialise. */
public void ReInit(java.io.InputStream dstream)
{
ReInit(dstream, 1, 1, 4096);
}
/** @return token image as String */
public String GetImage()
{
if (bufpos >= tokenBegin)
return new String(buffer, tokenBegin, bufpos - tokenBegin + 1);
else
return new String(buffer, tokenBegin, bufsize - tokenBegin) +
new String(buffer, 0, bufpos + 1);
}
/** @return suffix */
public char[] GetSuffix(int len)
{
char[] ret = new char[len];
if ((bufpos + 1) >= len)
System.arraycopy(buffer, bufpos - len + 1, ret, 0, len);
else
{
System.arraycopy(buffer, bufsize - (len - bufpos - 1), ret, 0,
len - bufpos - 1);
System.arraycopy(buffer, 0, ret, len - bufpos - 1, bufpos + 1);
}
return ret;
}
/** Set buffers back to null when finished. */
public void Done()
{
nextCharBuf = null;
buffer = null;
bufline = null;
bufcolumn = null;
}
/**
* Method to adjust line and column numbers for the start of a token.
*/
public void adjustBeginLineColumn(int newLine, int newCol)
{
int start = tokenBegin;
int len;
if (bufpos >= tokenBegin)
{
len = bufpos - tokenBegin + inBuf + 1;
}
else
{
len = bufsize - tokenBegin + bufpos + 1 + inBuf;
}
int i = 0, j = 0, k = 0;
int nextColDiff = 0, columnDiff = 0;
while (i < len && bufline[j = start % bufsize] == bufline[k = ++start % bufsize])
{
bufline[j] = newLine;
nextColDiff = columnDiff + bufcolumn[k] - bufcolumn[j];
bufcolumn[j] = newCol + columnDiff;
columnDiff = nextColDiff;
i++;
}
if (i < len)
{
bufline[j] = newLine++;
bufcolumn[j] = newCol + columnDiff;
while (i++ < len)
{
if (bufline[j = start % bufsize] != bufline[++start % bufsize])
bufline[j] = newLine++;
else
bufline[j] = newLine;
}
}
line = bufline[j];
column = bufcolumn[j];
}
}
/* JavaCC - OriginalChecksum=d63a793bd614cb11b1bb35c273b7864c (do not edit this line) */
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred.lib;
import java.util.List;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobConfigurable;
import org.apache.hadoop.mapred.lib.KeyFieldHelper.KeyDescription;
import org.apache.hadoop.io.Text;
/**
* This comparator implementation provides a subset of the features provided
* by the Unix/GNU Sort. In particular, the supported features are:
* -n, (Sort numerically)
* -r, (Reverse the result of comparison)
* -k pos1[,pos2], where pos is of the form f[.c][opts], where f is the number
* of the field to use, and c is the number of the first character from the
* beginning of the field. Fields and character posns are numbered starting
* with 1; a character position of zero in pos2 indicates the field's last
* character. If '.c' is omitted from pos1, it defaults to 1 (the beginning
* of the field); if omitted from pos2, it defaults to 0 (the end of the
* field). opts are ordering options (any of 'nr' as described above).
* We assume that the fields in the key are separated by
* map.output.key.field.separator.
*/
public class KeyFieldBasedComparator<K, V> extends WritableComparator
implements JobConfigurable {
// Parses and holds the sort key specifications (-k/-n/-r) from the job config.
private KeyFieldHelper keyFieldHelper = new KeyFieldHelper();
// Byte constants recognized by the numeric comparison routines below.
private static final byte NEGATIVE = (byte)'-';
private static final byte ZERO = (byte)'0';
private static final byte DECIMAL = (byte)'.';
/**
 * Reads the key-field comparator option and the key field separator from the
 * job configuration and hands both to the helper for parsing.
 */
public void configure(JobConf job) {
String comparatorSpec = job.getKeyFieldComparatorOption();
String separator = job.get("map.output.key.field.separator", "\t");
keyFieldHelper.setKeyFieldSeparator(separator);
keyFieldHelper.parseOption(comparatorSpec);
}
/** Creates a comparator over serialized {@link Text} keys. */
public KeyFieldBasedComparator() {
super(Text.class);
}
/**
 * Compares two serialized Text keys field-by-field per the parsed key specs;
 * falls back to a plain byte comparison when no key specs were configured.
 * First differing key spec decides the result.
 */
public int compare(byte[] b1, int s1, int l1,
byte[] b2, int s2, int l2) {
// Skip the vint length prefix that Text serialization writes before the bytes.
int n1 = WritableUtils.decodeVIntSize(b1[s1]);
int n2 = WritableUtils.decodeVIntSize(b2[s2]);
List <KeyDescription> allKeySpecs = keyFieldHelper.keySpecs();
if (allKeySpecs.size() == 0) {
return compareBytes(b1, s1+n1, l1-n1, b2, s2+n2, l2-n2);
}
// Field boundaries within each key, computed once and reused for every spec.
int []lengthIndicesFirst = keyFieldHelper.getWordLengths(b1, s1+n1, s1+l1);
int []lengthIndicesSecond = keyFieldHelper.getWordLengths(b2, s2+n2, s2+l2);
for (KeyDescription keySpec : allKeySpecs) {
// Byte range [startChar, endChar] covered by this spec in each key;
// -1 start means the field is absent from the key.
int startCharFirst = keyFieldHelper.getStartOffset(b1, s1+n1, s1+l1, lengthIndicesFirst,
keySpec);
int endCharFirst = keyFieldHelper.getEndOffset(b1, s1+n1, s1+l1, lengthIndicesFirst,
keySpec);
int startCharSecond = keyFieldHelper.getStartOffset(b2, s2+n2, s2+l2, lengthIndicesSecond,
keySpec);
int endCharSecond = keyFieldHelper.getEndOffset(b2, s2+n2, s2+l2, lengthIndicesSecond,
keySpec);
int result;
if ((result = compareByteSequence(b1, startCharFirst, endCharFirst, b2,
startCharSecond, endCharSecond, keySpec)) != 0) {
return result;
}
}
return 0;
}
/**
 * Compares one key-spec's byte range from each key, honoring the spec's
 * numeric (-n) and reverse (-r) flags. An absent range (start == -1) sorts
 * before a present one, flipped when the spec is reversed.
 */
private int compareByteSequence(byte[] first, int start1, int end1,
byte[] second, int start2, int end2, KeyDescription key) {
if (start1 == -1) {
return key.reverse ? 1 : -1;
}
if (start2 == -1) {
return key.reverse ? -1 : 1;
}
int result = key.numeric
? numericalCompare(first, start1, end1, second, start2, end2)
: compareBytes(first, start1, end1, second, start2, end2);
return key.reverse ? -result : result;
}
/**
 * Compares two byte ranges as decimal numbers (GNU sort -n style): optional
 * leading '-', leading zeros skipped, equal-length mantissas compared by
 * first differing digit, otherwise the longer mantissa wins; fractional
 * parts are delegated to decimalCompare.
 */
private int numericalCompare (byte[] a, int start1, int end1,
byte[] b, int start2, int end2) {
int i = start1;
int j = start2;
// mul flips the result to -1 semantics when both numbers are negative.
int mul = 1;
byte first_a = a[i];
byte first_b = b[j];
if (first_a == NEGATIVE) {
if (first_b != NEGATIVE) {
//check for cases like -0.0 and 0.0 (they should be declared equal)
return oneNegativeCompare(a,start1+1,end1,b,start2,end2);
}
i++;
}
if (first_b == NEGATIVE) {
if (first_a != NEGATIVE) {
//check for cases like 0.0 and -0.0 (they should be declared equal)
return -oneNegativeCompare(b,start2+1,end2,a,start1,end1);
}
j++;
}
if (first_b == NEGATIVE && first_a == NEGATIVE) {
mul = -1;
}
//skip over ZEROs
while (i <= end1) {
if (a[i] != ZERO) {
break;
}
i++;
}
while (j <= end2) {
if (b[j] != ZERO) {
break;
}
j++;
}
//skip over equal characters and stopping at the first nondigit char
//The nondigit character could be '.'
while (i <= end1 && j <= end2) {
if (!isdigit(a[i]) || a[i] != b[j]) {
break;
}
i++; j++;
}
if (i <= end1) {
first_a = a[i];
}
if (j <= end2) {
first_b = b[j];
}
//store the result of the difference. This could be final result if the
//number of digits in the mantissa is the same in both the numbers
int firstResult = first_a - first_b;
//check whether we hit a decimal in the earlier scan
if ((first_a == DECIMAL && (!isdigit(first_b) || j > end2)) ||
(first_b == DECIMAL && (!isdigit(first_a) || i > end1))) {
return ((mul < 0) ? -decimalCompare(a,i,end1,b,j,end2) :
decimalCompare(a,i,end1,b,j,end2));
}
//check the number of digits in the mantissa of the numbers
int numRemainDigits_a = 0;
int numRemainDigits_b = 0;
while (i <= end1) {
//if we encounter a non-digit treat the corresponding number as being
//smaller
if (isdigit(a[i++])) {
numRemainDigits_a++;
} else break;
}
while (j <= end2) {
//if we encounter a non-digit treat the corresponding number as being
//smaller
if (isdigit(b[j++])) {
numRemainDigits_b++;
} else break;
}
// More remaining mantissa digits means a larger magnitude; on a tie the
// first differing digit (firstResult) decides. mul applies the sign flip.
int ret = numRemainDigits_a - numRemainDigits_b;
if (ret == 0) {
return ((mul < 0) ? -firstResult : firstResult);
} else {
return ((mul < 0) ? -ret : ret);
}
}
/** Returns true when b is an ASCII decimal digit ('0'..'9'). */
private boolean isdigit(byte b) {
return '0' <= b && b <= '9';
}
/**
 * Compares the fractional parts starting at a[i] and b[j] (each expected to
 * be at a '.' or exhausted). A longer run of digits only wins if it contains
 * a non-zero digit (e.g. .4444 vs .444400004); non-digit bytes terminate the
 * fraction.
 */
private int decimalCompare(byte[] a, int i, int end1,
byte[] b, int j, int end2) {
if (i > end1) {
//if a[] has nothing remaining
return -decimalCompare1(b, ++j, end2);
}
if (j > end2) {
//if b[] has nothing remaining
return decimalCompare1(a, ++i, end1);
}
if (a[i] == DECIMAL && b[j] == DECIMAL) {
// Both have fractions: compare digit-by-digit until a mismatch or one runs out.
while (i <= end1 && j <= end2) {
if (a[i] != b[j]) {
if (isdigit(a[i]) && isdigit(b[j])) {
return a[i] - b[j];
}
// A digit outranks a terminating non-digit; two non-digits tie.
if (isdigit(a[i])) {
return 1;
}
if (isdigit(b[j])) {
return -1;
}
return 0;
}
i++; j++;
}
if (i > end1 && j > end2) {
return 0;
}
if (i > end1) {
//check whether there is a non-ZERO digit after potentially
//a number of ZEROs (e.g., a=.4444, b=.444400004)
return -decimalCompare1(b, j, end2);
}
if (j > end2) {
//check whether there is a non-ZERO digit after potentially
//a number of ZEROs (e.g., b=.4444, a=.444400004)
return decimalCompare1(a, i, end1);
}
}
else if (a[i] == DECIMAL) {
// Only a has a fraction; it wins only if the fraction is non-zero.
return decimalCompare1(a, ++i, end1);
}
else if (b[j] == DECIMAL) {
return -decimalCompare1(b, ++j, end2);
}
return 0;
}
private int decimalCompare1(byte[] a, int i, int end) {
while (i <= end) {
if (a[i] == ZERO) {
i++;
continue;
}
if (isdigit(a[i])) {
return 1;
} else {
return 0;
}
}
return 0;
}
  /**
   * Compares when a[] is negative (sign already consumed) and b[] is not:
   * the result can only be "equal" when both values are zeros (e.g. -0.0
   * versus 0.0); otherwise the negative side is smaller.
   *
   * @return 0 when both are zero, -1 otherwise
   */
  private int oneNegativeCompare(byte[] a, int start1, int end1,
      byte[] b, int start2, int end2) {
    //here a[] is negative and b[] is positive
    //We have to ascertain whether the number contains any digits.
    //If it does, then it is a smaller number for sure. If not,
    //then we need to scan b[] to find out whether b[] has a digit
    //If b[] does contain a digit, then b[] is certainly
    //greater. If not, that is, both a[] and b[] don't contain
    //digits then they should be considered equal.
    if (!isZero(a, start1, end1)) {
      return -1;
    }
    //reached here - this means that a[] is a ZERO
    if (!isZero(b, start2, end2)) {
      return -1;
    }
    //reached here - both numbers are basically ZEROs and hence
    //they should compare equal
    return 0;
  }
  /**
   * Returns true if the number in a[start..end] is a (possibly fractional)
   * zero.  Non-digit characters terminate the scan and are treated as ZERO.
   */
  private boolean isZero(byte a[], int start, int end) {
    //check for zeros in the significand part as well as the decimal part
    //note that we treat the non-digit characters as ZERO
    int i = start;
    //we check the significand for being a ZERO
    while (i <= end) {
      if (a[i] != ZERO) {
        // Any non-zero digit means the value is not zero; any other
        // character (including the decimal point) ends the significand.
        if (a[i] != DECIMAL && isdigit(a[i])) {
          return false;
        }
        break;
      }
      i++;
    }
    // Note: the i++ both tests for and steps past the decimal point.
    if (i != (end+1) && a[i++] == DECIMAL) {
      //we check the decimal part for being a ZERO
      while (i <= end) {
        if (a[i] != ZERO) {
          if (isdigit(a[i])) {
            return false;
          }
          break;
        }
        i++;
      }
    }
    return true;
  }
}
| |
package com.leo.ostbm;
import com.leo.ostbm.Resources.Facepic;
import com.leo.ostbm.StringUtil.SplitResult;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import java.awt.*;
import java.awt.font.TextAttribute;
import java.awt.image.BufferedImage;
import java.util.List;
import java.util.*;
public class TextboxUtil {
    /** Preset text colors selectable with a numeric \c argument. */
    public static final Map<Integer, Color> TEXTBOX_PRESET_COLORS;
    /** Names of the presets; also accepted as a \c argument by name. */
    public static final Map<Integer, String> TEXTBOX_PRESET_COLOR_NAMES;
    // Cache of parse results keyed by Objects.hash(face, text).
    // NOTE(review): distinct (face, text) pairs that collide on the hash
    // would return the wrong cached parse - confirm this is acceptable.
    private static final Map<Integer, ParsedTextbox> tpdCache = new HashMap<>();
    // Background image used by drawTextbox; replaceable via setTextboxImage.
    private static BufferedImage textboxImage = Resources.getTextboxImage();

    static {
        final Map<Integer, Color> colors = new HashMap<>();
        final Map<Integer, String> cnames = new HashMap<>();
        colors.put(0, new Color(255, 255, 255));
        cnames.put(0, "white");
        colors.put(1, new Color(255, 64, 64));
        cnames.put(1, "red");
        colors.put(2, new Color(0, 224, 0));
        cnames.put(2, "green");
        colors.put(3, new Color(255, 255, 0));
        cnames.put(3, "yellow");
        colors.put(4, new Color(64, 64, 255));
        cnames.put(4, "blue");
        colors.put(5, new Color(255, 64, 255));
        cnames.put(5, "purple");
        colors.put(6, new Color(64, 255, 255));
        cnames.put(6, "cyan");
        colors.put(7, new Color(128, 128, 128));
        cnames.put(7, "gray");
        TEXTBOX_PRESET_COLORS = Collections.unmodifiableMap(colors);
        TEXTBOX_PRESET_COLOR_NAMES = Collections.unmodifiableMap(cnames);
    }
@Contract("_, null -> null")
public static ParsedTextbox parseTextbox(final String face, final String text) {
if (text == null)
return null;
if (tpdCache.containsKey(Objects.hash(face, text)))
return tpdCache.get(Objects.hash(face, text));
final List<StyleSpan> styleSpans = new LinkedList<>();
final List<TextboxError> errors = new LinkedList<>();
final Map<Integer, List<TextboxModifier>> mods = new LinkedHashMap<>();
final ParsedTextbox ret = new ParsedTextbox(styleSpans, errors, mods);
final StringBuilder strippedBuilder = new StringBuilder();
final String[] lines = text.split("\n");
final int[] strippedChars = new int[lines.length];
if (lines.length > 4)
errors.add(new TextboxError(-1, "Too many lines: has " + lines.length + " lines, but maximum is 4 lines"));
int styleOff = 0, modPos = 0;
Color col = Color.WHITE;
String format = "";
boolean escape = false;
for (int i = 0; i < lines.length; i++) {
StyleSpan.StyleType defType = StyleSpan.StyleType.NORMAL;
if (i > 3)
defType = StyleSpan.StyleType.ERROR;
final SplitResult sp = StringUtil.split(lines[i], '\\');
if (sp.partCount == 0) {
// no mods here
final String line = lines[i];
strippedBuilder.append(line);
styleSpans.add(new StyleSpan(defType, 0, line.length(), col, format));
continue;
}
// first part is always not a modifier
final int firstLen = sp.parts[0].length();
if (firstLen > 0) {
final String part = sp.parts[0];
modPos += firstLen;
strippedBuilder.append(part);
styleSpans.add(new StyleSpan(defType, styleOff, firstLen, col, format));
}
for (int j = 1; j < sp.partCount; j++) {
final int ind = sp.partIndex[j];
final String part = sp.parts[j];
if (escape) {
escape = false;
String partBS = "\\" + part;
modPos += partBS.length();
strippedBuilder.append(partBS);
styleSpans.add(new StyleSpan(defType, styleOff + ind - 1, partBS.length() + 1, col, format));
continue;
} else if (part.length() == 0) {
escape = true;
continue;
}
final char mod = part.charAt(0);
if (!TextboxModifier.MOD_CHARS.containsKey(mod)) {
modPos += 2;
styleSpans.add(new StyleSpan(StyleSpan.StyleType.ERROR, styleOff + ind - 1, 2));
errors.add(new TextboxError(i, "Unknown modifier: '" + mod + "'"));
continue;
}
final TextboxModifier.ModType modType = TextboxModifier.MOD_CHARS.get(mod);
int modLen = 2;
final boolean noArgsPossible = Arrays.binarySearch(modType.getArgumentNumbers(), 0) >= 0;
final boolean noArgs = part.indexOf('[') < 0;
if (!noArgsPossible && noArgs) {
modPos += 2;
styleSpans.add(new StyleSpan(StyleSpan.StyleType.ERROR, styleOff + ind - 1, 2));
errors.add(new TextboxError(i, "Bad modifier argument number: got 0 args for modifier '" + mod
+ "', but that modifier does not accept 0 arguments"));
continue;
}
String[] args = new String[0];
if (!noArgs) {
final int argsInd = 2;
final int end = part.indexOf(']');
if (end < 0) {
modPos += 2;
styleSpans.add(new StyleSpan(StyleSpan.StyleType.ERROR, styleOff + ind - 1, 3));
errors.add(new TextboxError(i,
"Bad modifier format: args for modifier '" + mod + "' are never closed"));
continue;
}
if (argsInd == end) {
if (!noArgsPossible) {
modPos += 2;
styleSpans.add(new StyleSpan(StyleSpan.StyleType.ERROR, styleOff + ind - 1, 2));
errors.add(new TextboxError(i, "Bad modifier argument number: got 0 args for modifier '"
+ mod + "', but that modifier does not accept that number of arguments"));
continue;
}
} else {
args = part.substring(argsInd, end).split(",");
if (Arrays.binarySearch(modType.getArgumentNumbers(), args.length) < 0) {
modPos += 2;
styleSpans.add(new StyleSpan(StyleSpan.StyleType.ERROR, styleOff + ind - 1, 2));
errors.add(new TextboxError(i,
"Bad modifier argument number: got " + args.length + " args for modifier '" + mod
+ "', but that modifier does not accept that number of arguments"));
continue;
}
}
modLen += end;
}
strippedChars[i] += modLen;
switch (modType) {
case FACE:
if (args.length > 0 && Resources.getFace(args[0]) == null) {
modPos += modLen;
styleSpans.add(new StyleSpan(StyleSpan.StyleType.ERROR, styleOff + ind - 1, modLen));
errors.add(new TextboxError(i,
"Bad modifier argument: face \"" + args[0] + "\" does not exist"));
continue;
}
break;
case CHARACTER:
char val;
try {
val = (char) Integer.parseUnsignedInt(args[0], 16);
} catch (NumberFormatException e) {
modPos += modLen;
styleSpans.add(new StyleSpan(StyleSpan.StyleType.ERROR, styleOff + ind - 1, modLen));
errors.add(new TextboxError(i,
"Bad modifier argument: \"" + args[0] + "\" is not a hex value"));
continue;
}
modPos++;
strippedBuilder.append(val);
break;
default:
break;
}
final TextboxModifier modObj = new TextboxModifier(modType, args);
Main.LOGGER.trace("adding " + modObj + " to index " + modPos);
ret.addModifier(modPos, modObj);
styleSpans.add(new StyleSpan(StyleSpan.StyleType.MODIFIER, styleOff + ind - 1, modLen));
final String normPart = part.substring(modLen - 1);
modPos += normPart.length();
strippedBuilder.append(normPart);
Main.LOGGER.trace("next index will be " + modPos + " (after adding " + normPart.length() + ")");
switch (modObj.type) {
case COLOR:
col = getColorModValue(modObj, Color.WHITE);
break;
case FORMAT:
if (modObj.args.length == 0)
format = "";
else
format = modObj.args[0].toLowerCase();
break;
default:
break;
}
styleSpans.add(new StyleSpan(defType, styleOff + ind - 1 + modLen, part.length(), col, format));
}
strippedBuilder.append('\n');
styleOff += lines[i].length() + 1;
}
ret.strippedText = strippedBuilder.toString();
// pass 2: length of stripped lines
styleOff = 0;
final String[] strippedLines = ret.strippedText.split("\n");
for (int i = 0; i < strippedLines.length; i++) {
int maxLen = 57;
final boolean hasFace = !Resources.FACE_BLANK.equals(face);
if (hasFace)
maxLen -= 10;
final int len = strippedLines[i].length();
if (len == 0) {
styleOff++;
continue;
}
if (len > maxLen) {
styleSpans.add(new StyleSpan(StyleSpan.StyleType.ERROR, styleOff + strippedChars[i] + maxLen, len));
errors.add(new TextboxError(i, "Line too long: has " + len + " characters , but maximum is " + maxLen
+ " characters (with" + (!hasFace ? "out" : "") + " face)"));
}
styleOff += len + 1;
}
tpdCache.put(Objects.hash(face, text), ret);
return ret;
}
    /** Replaces the background image used when drawing textboxes. */
    public static void setTextboxImage(@NotNull BufferedImage textboxImage) {
        TextboxUtil.textboxImage = textboxImage;
    }

    /** Parses {@code text} and renders it into a new textbox image. */
    @NotNull
    public static BufferedImage drawTextbox(final String face, final String text, final boolean drawArrow,
            final int arrowOffset) {
        return drawTextbox(face, parseTextbox(face, text), drawArrow, arrowOffset);
    }

    /** Renders an already-parsed textbox into a new 608x128 ARGB image. */
    @NotNull
    public static BufferedImage drawTextbox(final String face, final ParsedTextbox tpd, final boolean drawArrow,
            final int arrowOffset) {
        final BufferedImage ret = new BufferedImage(608, 128, BufferedImage.TYPE_INT_ARGB);
        final Graphics g = ret.getGraphics();
        drawTextbox(g, face, tpd, 0, 0, drawArrow, arrowOffset);
        return ret;
    }

    /** Convenience overload with no arrow offset. */
    @NotNull
    public static BufferedImage drawTextbox(final String face, final String text, final boolean drawArrow) {
        return drawTextbox(face, text, drawArrow, 0);
    }

    /** Convenience overload with no arrow offset. */
    @NotNull
    public static BufferedImage drawTextbox(final String face, final ParsedTextbox tpd, final boolean drawArrow) {
        return drawTextbox(face, tpd, drawArrow, 0);
    }

    /** Parses {@code text} and draws the textbox onto {@code g} at (x, y). */
    public static void drawTextbox(final Graphics g, final String face, final String text, final int x, final int y,
            final boolean drawArrow, final int arrowOffset) {
        drawTextbox(g, face, parseTextbox(face, text), x, y, drawArrow, arrowOffset);
    }
    /**
     * Draws a complete textbox onto {@code g}: the background image, the
     * optional face picture, the optional continue arrow, then the text.
     */
    public static void drawTextbox(@NotNull final Graphics g, final String face, final ParsedTextbox tpd, final int x,
            final int y, final boolean drawArrow, final int arrowOffset) {
        // Lazily (re)load the background if it has not been set.
        if (textboxImage == null)
            textboxImage = Resources.getTextboxImage();
        g.drawImage(textboxImage, x, y, null);
        final Facepic faceObj = Resources.getFace(face);
        if (faceObj != null)
            g.drawImage(faceObj.getImage(), x + 496, y + 16, null);
        if (drawArrow)
            g.drawImage(Resources.getTextboxArrow(), x + 299, y + 118 + arrowOffset, null);
        g.setColor(Color.WHITE);
        drawTextboxString(g, tpd, x + 20, y + 10);
    }
    /** Convenience overload with no arrow offset. */
    public static void drawTextbox(final Graphics g, final String face, final String text, final int x, final int y,
            final boolean drawArrow) {
        drawTextbox(g, face, text, x, y, drawArrow, 0);
    }
private static Color getColorModValue(@NotNull final TextboxModifier mod, final Color defaultColor) {
if (mod.type != TextboxModifier.ModType.COLOR)
return defaultColor;
Color retColor = defaultColor;
final String[] cdata = mod.args;
if (cdata.length == 3)
retColor = new Color(Integer.parseInt(cdata[0]), Integer.parseInt(cdata[1]), Integer.parseInt(cdata[2]));
else if (cdata.length == 1) {
int preset;
try {
preset = Integer.parseInt(cdata[0]);
if (TEXTBOX_PRESET_COLORS.containsKey(preset))
retColor = TEXTBOX_PRESET_COLORS.get(preset);
} catch (final NumberFormatException e) {
String col = cdata[0];
if (col.toLowerCase().startsWith("h:")) {
if (col.length() < 8)
return retColor;
col = col.substring(0, 8);
try {
retColor = Color.decode("0x" + col.substring(2));
return retColor;
} catch (final NumberFormatException e1) {
// ignore this error
}
}
final String cname = col.toLowerCase();
for (final Map.Entry<Integer, String> entry : TEXTBOX_PRESET_COLOR_NAMES.entrySet())
if (cname.equals(entry.getValue())) {
retColor = TEXTBOX_PRESET_COLORS.get(entry.getKey());
break;
}
}
}
return retColor;
}
    /**
     * Draws the stripped text of {@code tpd} onto {@code g}, applying COLOR
     * and FORMAT modifiers as their stripped-text positions are reached.
     * Restores the caller's color before returning.
     */
    private static void drawTextboxString(final Graphics g, @NotNull final ParsedTextbox tpd, int x, int y) {
        Graphics2D g2 = (Graphics2D) g;
        // Base font is bold Terminus at 20pt.
        g2.setFont(new Font("Terminus", Font.BOLD, 20));
        final int startX = x;
        final Color defaultCol = g2.getColor();
        FontMetrics fm = g2.getFontMetrics();
        final int lineSpace = fm.getHeight() + 1;
        final String text = tpd.strippedText;
        // Index into the stripped text; modifier positions are keyed on it.
        int currentChar = 0;
        final String[] lines = text.split("\n");
        for (int i = 0; i < lines.length; i++) {
            final String line = lines[i];
            y += lineSpace;
            x = startX;
            final char[] chars = line.toCharArray();
            for (final char c : chars) {
                if (tpd.mods.containsKey(currentChar)) {
                    final List<TextboxModifier> mods = tpd.mods.get(currentChar);
                    for (int j = 0; j < mods.size(); j++) {
                        final TextboxModifier mod = mods.get(j);
                        Main.LOGGER.trace(i + ":" + currentChar + " - mod " + j + " is " + mod);
                        switch (mod.type) {
                        case COLOR:
                            g.setColor(getColorModValue(mod, defaultCol));
                            break;
                        case FORMAT:
                            Font f = g2.getFont();
                            final String format = mod.args.length == 0 ? "" : mod.args[0].toLowerCase();
                            Map<TextAttribute, Object> map = new Hashtable<>();
                            // NOTE(review): the base font is already bold, so
                            // "b" here switches back to regular weight -
                            // confirm that inversion is intentional.
                            if (format.contains("b"))
                                map.put(TextAttribute.WEIGHT, TextAttribute.WEIGHT_REGULAR);
                            else
                                map.put(TextAttribute.WEIGHT, TextAttribute.WEIGHT_BOLD);
                            if (format.contains("i")) {
                                map.put(TextAttribute.POSTURE, TextAttribute.POSTURE_OBLIQUE);
                                //g2.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
                            } else {
                                // NOTE(review): -1 autoboxes to Integer, but
                                // POSTURE expects a Float (and STRIKETHROUGH a
                                // Boolean below); verify deriveFont accepts or
                                // ignores these values as intended.
                                map.put(TextAttribute.POSTURE, -1);
                                //g2.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_OFF);
                            }
                            if (format.contains("u"))
                                map.put(TextAttribute.UNDERLINE, TextAttribute.UNDERLINE_ON);
                            else
                                map.put(TextAttribute.UNDERLINE, -1);
                            if (format.contains("s"))
                                map.put(TextAttribute.STRIKETHROUGH, TextAttribute.STRIKETHROUGH_ON);
                            else
                                map.put(TextAttribute.STRIKETHROUGH, -1);
                            f = f.deriveFont(map);
                            g2.setFont(f);
                            // Metrics change with the font; refresh for charWidth.
                            fm = g2.getFontMetrics();
                            break;
                        default:
                            break;
                        }
                    }
                }
                g2.drawString(Character.toString(c), x, y);
                x += fm.charWidth(c);
                currentChar++;
            }
        }
        // Restore the caller's color.
        g.setColor(defaultCol);
    }
    /**
     * Renders a sequence of textboxes into animation frames, honoring speed,
     * delay, face-change and instant-interrupt modifiers.  The opaque textbox
     * background is swapped in while rendering and the previous background is
     * restored before returning.
     *
     * @param boxes the textboxes to animate, in order
     * @return one BufferedImage per animation frame
     */
    @NotNull
    public static List<BufferedImage> makeTextboxAnimation(@NotNull final List<Textbox> boxes) {
        BufferedImage oldTextboxImage = textboxImage;
        setTextboxImage(Resources.getTextboxImageOpaque());
        final List<BufferedImage> ret = new ArrayList<>();
        for (int i = 0; i < boxes.size(); i++) {
            final Textbox box = boxes.get(i);
            final ParsedTextbox tpd = parseTextbox(box.face, box.text);
            final String text = tpd.strippedText;
            String face = box.face;
            StringBuilder textBuilder = new StringBuilder();
            String textStorage;
            boolean instant = false;
            // speed: frames per character; delay: frames before the next one.
            int speed = 1;
            int delay = speed;
            // Modifiers at position 0 apply before any character is drawn.
            if (tpd.mods.containsKey(0)) {
                final List<TextboxModifier> mods = tpd.mods.get(0);
                if (mods.get(0).type == TextboxModifier.ModType.INSTANT_INTERRUPT)
                    instant = true;
                else
                    for (final TextboxModifier mod : mods) {
                        switch (mod.type) {
                        case DELAY:
                            int newDelay;
                            try {
                                newDelay = Integer.parseInt(mod.args[0]);
                                newDelay = Math.max(1, newDelay);
                            } catch (final NumberFormatException e) {
                                Main.LOGGER.error("Error while parsing delay!", e);
                                newDelay = delay;
                            }
                            delay = newDelay + speed;
                            break;
                        case SPEED:
                            int newSpeed;
                            try {
                                newSpeed = Integer.parseInt(mod.args[0]);
                                newSpeed = Math.max(1, newSpeed);
                            } catch (final NumberFormatException e) {
                                Main.LOGGER.error("Error while parsing speed!", e);
                                newSpeed = speed;
                            }
                            speed = newSpeed;
                            break;
                        default:
                            break;
                        }
                    }
            }
            // An interrupt on the final character suppresses the hold/arrow frames.
            boolean endsWithInterrupt = false;
            if (tpd.mods.containsKey(text.length() - 1)) {
                final List<TextboxModifier> mods = tpd.mods.get(text.length() - 1);
                if (mods.get(mods.size() - 1).type == TextboxModifier.ModType.INSTANT_INTERRUPT)
                    endsWithInterrupt = true;
            }
            if (instant)
                ret.add(drawTextbox(face, tpd, false));
            else {
                if (delay == 1)
                    delay = speed;
                // add a blank textbox frame
                for (int d = 0; d < delay; d++)
                    ret.add(drawTextbox(box.face, "", false));
                for (int l = 0; l < text.length() - 1; l++) {
                    // Delay/speed modifiers take effect one character early.
                    if (tpd.mods.containsKey(l + 1)) {
                        final List<TextboxModifier> mods = tpd.mods.get(l + 1);
                        for (final TextboxModifier mod : mods)
                            switch (mod.type) {
                            case DELAY:
                                int newDelay;
                                try {
                                    newDelay = Integer.parseInt(mod.args[0]);
                                    newDelay = Math.max(0, newDelay);
                                } catch (final NumberFormatException e) {
                                    Main.LOGGER.error("Error while parsing delay!", e);
                                    newDelay = delay;
                                }
                                delay = newDelay + speed;
                                break;
                            case SPEED:
                                int newSpeed;
                                try {
                                    newSpeed = Integer.parseInt(mod.args[0]);
                                    newSpeed = Math.max(1, newSpeed);
                                } catch (final NumberFormatException e) {
                                    Main.LOGGER.error("Error while parsing speed!", e);
                                    newSpeed = speed;
                                }
                                speed = newSpeed;
                                break;
                            default:
                                break;
                            }
                    }
                    // Face changes apply at the current character.
                    if (tpd.mods.containsKey(l)) {
                        final List<TextboxModifier> mods = tpd.mods.get(l);
                        for (final TextboxModifier mod : mods)
                            switch (mod.type) {
                            case FACE:
                                if (mod.args.length == 0) {
                                    face = Resources.FACE_BLANK;
                                    break;
                                }
                                final String newFace = mod.args[0];
                                if (Resources.getFace(newFace) != null)
                                    face = newFace;
                                break;
                            default:
                                break;
                            }
                    }
                    textBuilder.append(text.charAt(l));
                    // Temporarily swap in the partial text to draw this frame.
                    textStorage = tpd.strippedText;
                    tpd.strippedText = textBuilder.toString();
                    for (int d = 0; d < delay; d++)
                        ret.add(drawTextbox(face, tpd, false));
                    tpd.strippedText = textStorage;
                    delay = speed;
                }
            }
            if (!endsWithInterrupt)
                if (i == boxes.size() - 1) {
                    // Last box: hold the finished frame for 48 frames.
                    final BufferedImage frame = drawTextbox(face, tpd, false);
                    for (int d = 0; d < 48; d++)
                        ret.add(frame);
                } else {
                    // Bob the continue arrow up and down, 3 frames per step.
                    int arrowOffset = 0, dir = 1;
                    for (int d = 0; d < 16; d++) {
                        arrowOffset += dir;
                        if (dir == 1) {
                            if (arrowOffset == 1)
                                dir = -1;
                        } else if (arrowOffset == -1)
                            dir = 1;
                        final BufferedImage frame = drawTextbox(face, tpd, true, arrowOffset);
                        ret.add(frame);
                        ret.add(frame);
                        ret.add(frame);
                    }
                }
        }
        setTextboxImage(oldTextboxImage);
        return ret;
    }
public static class Textbox {
public String face;
public String text;
@Contract(pure = true)
public Textbox(final String face, final String text) {
this.face = face;
this.text = text;
}
@Contract(pure = true)
public Textbox(@NotNull final Textbox other) {
this(other.face, other.text);
}
@Contract(pure = true)
public Textbox() {
this(Resources.FACE_BLANK, "");
}
@Override
public String toString() {
final ParsedTextbox tpd = parseTextbox(face, text);
if (!tpd.errors.isEmpty())
return "(has errors)";
String t = tpd.strippedText.trim();
t = t.replace('\n', ' ');
final int maxLen = 27;
if (t.length() > maxLen)
t = t.substring(0, maxLen) + "...";
if (t.isEmpty())
t = "(empty)";
else
t = "\"" + t + "\"";
return t;
}
}
public static class TextboxModifier {
public static final Map<Character, ModType> MOD_CHARS;
static {
final Map<Character, ModType> mc = new HashMap<>();
for (final ModType type : ModType.values()) {
if (mc.containsKey(type.getModChar()))
throw new RuntimeException("Duplicate mod character key!");
mc.put(type.getModChar(), type);
}
MOD_CHARS = Collections.unmodifiableMap(mc);
}
public final ModType type;
public final String[] args;
@Contract(pure = true)
public TextboxModifier(final ModType type, final String[] args) {
this.type = type;
this.args = args;
}
@Override
public String toString() {
return "TextboxModifier [type=" + type + ", args=" + Arrays.toString(args) + "]";
}
public enum ModType {
FACE('@', 0, 1),
COLOR('c', 0, 1, 3),
DELAY('d', 1),
INSTANT_INTERRUPT('i'),
SPEED('s', 1),
CHARACTER('u', 1),
FORMAT('f', 0, 1),
BS_ESCAPE('\\');
private final char modChar;
private final int[] argNums;
@Contract(pure = true)
ModType(final char modChar) {
this.modChar = modChar;
argNums = new int[]{0};
}
@Contract(pure = true)
ModType(final char modChar, final int argNum) {
this.modChar = modChar;
argNums = new int[]{argNum};
}
@Contract(pure = true)
ModType(final char modChar, final int... argNums) {
this.modChar = modChar;
this.argNums = argNums;
}
@Contract(pure = true)
public char getModChar() {
return modChar;
}
@Contract(pure = true)
public int[] getArgumentNumbers() {
return argNums;
}
}
}
    /** A styled run of characters in the raw textbox text (for editors). */
    public static class StyleSpan {
        // What kind of content this span covers.
        public final StyleType type;
        // Start offset of the span.
        public final int pos;
        public final int length;
        // Text color; null for spans built with the short constructor.
        public final Color color;
        // Format flags string (e.g. "bi"); null for the short constructor.
        public final String format;

        @Contract(pure = true)
        public StyleSpan(final StyleType type, final int pos, final int length, final Color color,
                final String format) {
            this.type = type;
            this.pos = pos;
            this.length = length;
            this.color = color;
            this.format = format;
        }

        /** Span without color/format (used for modifier and error spans). */
        @Contract(pure = true)
        public StyleSpan(final StyleType type, final int pos, final int length) {
            this(type, pos, length, null, null);
        }

        public enum StyleType {
            NORMAL,
            MODIFIER,
            ERROR
        }
    }
    /** A parse problem: the 0-based line it occurred on (-1 = whole box) and a message. */
    public static class TextboxError {
        public final int lineNum;
        public final String message;

        @Contract(pure = true)
        public TextboxError(final int lineNum, final String message) {
            this.lineNum = lineNum;
            this.message = message;
        }
    }
public static class ParsedTextbox {
public final List<StyleSpan> styleSpans;
public final List<TextboxError> errors;
public final Map<Integer, List<TextboxModifier>> mods;
public String strippedText;
@Contract(pure = true)
public ParsedTextbox(final List<StyleSpan> styleSpans, final List<TextboxError> errors,
final Map<Integer, List<TextboxModifier>> mods) {
this.styleSpans = styleSpans;
this.errors = errors;
this.mods = mods;
}
public void addModifier(final int pos, final TextboxModifier mod) {
List<TextboxModifier> list = mods.computeIfAbsent(pos, k -> new LinkedList<>());
list.add(mod);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package riotcmd;
import java.io.IOException ;
import java.io.InputStream ;
import java.io.OutputStream ;
import java.util.zip.GZIPOutputStream ;
import arq.cmdline.ModLangOutput ;
import arq.cmdline.ModLangParse ;
import arq.cmdline.ModContext ;
import arq.cmdline.ModTime ;
import jena.cmd.ArgDecl ;
import jena.cmd.CmdException;
import jena.cmd.CmdGeneral ;
import org.apache.jena.Jena ;
import org.apache.jena.atlas.io.IO ;
import org.apache.jena.atlas.lib.InternalErrorException ;
import org.apache.jena.atlas.lib.Pair ;
import org.apache.jena.atlas.web.ContentType ;
import org.apache.jena.atlas.web.TypedInputStream ;
import org.apache.jena.query.ARQ ;
import org.apache.jena.riot.* ;
import org.apache.jena.riot.lang.LabelToNode ;
import org.apache.jena.riot.lang.StreamRDFCounting ;
import org.apache.jena.riot.out.NodeToLabel ;
import org.apache.jena.riot.process.inf.InfFactory ;
import org.apache.jena.riot.process.inf.InferenceSetupRDFS ;
import org.apache.jena.riot.system.* ;
import org.apache.jena.riot.tokens.Tokenizer ;
import org.apache.jena.riot.tokens.TokenizerFactory ;
import org.apache.jena.sparql.core.DatasetGraph ;
import org.apache.jena.sparql.core.DatasetGraphFactory ;
import org.apache.jena.system.JenaSystem ;
/** Common framework for running RIOT parsers */
public abstract class CmdLangParse extends CmdGeneral
{
    static { JenaSystem.init(); }

    // Command-line modules: timing, parser options, output options, context.
    protected ModTime modTime                   = new ModTime() ;
    protected ModLangParse modLangParse         = new ModLangParse() ;
    protected ModLangOutput modLangOutput       = new ModLangOutput() ;
    // RDFS inference setup; non-null only when an RDFS vocabulary is given.
    protected InferenceSetupRDFS setup          = null ;
    protected ModContext modContext             = new ModContext() ;
    protected ArgDecl strictDecl                = new ArgDecl(ArgDecl.NoValue, "strict") ;
    protected boolean cmdStrictMode             = false ;

    // Names used when reporting item counts and rates for a syntax family.
    interface LangHandler {
        String getItemsName() ;
        String getRateName() ;
    }
    static LangHandler langHandlerQuads = new LangHandler() {
        @Override
        public String getItemsName()        { return "quads" ; }
        @Override
        public String getRateName()         { return "QPS" ; }
    } ;
    static LangHandler langHandlerTriples = new LangHandler() {
        @Override
        public String getItemsName()        { return "triples" ; }
        @Override
        public String getRateName()         { return "TPS" ; }
    } ;
    // Used when the inputs mix triple and quad languages.
    static LangHandler langHandlerAny = new LangHandler() {
        @Override
        public String getItemsName()        { return "tuples" ; }
        @Override
        public String getRateName()         { return "TPS" ; }
    } ;
    // Label set chosen across all files processed in this run.
    protected LangHandler langHandlerOverall = null ;
    /** Registers the shared command modules and version information. */
    protected CmdLangParse(String[] argv)
    {
        super(argv) ;
        addModule(modContext) ;
        addModule(modTime) ;
        addModule(modLangOutput) ;
        addModule(modLangParse) ;
        super.modVersion.addClass(Jena.class) ;
        // Force - sometimes initialization does not cause these
        // to initialized early enough for reflection.
        String x1 = ARQ.VERSION ;
        String x2 = ARQ.BUILD_DATE ;
        super.modVersion.addClass(RIOT.class) ;
    }
    /** One-line usage summary shown in help output. */
    @Override
    protected String getSummary() {
        return getCommandName()+" [--time] [--check|--noCheck] [--sink] [--base=IRI] [--out=FORMAT] [--compress] file ..." ;
    }
    // Accumulated totals across all files, reported when timing is enabled.
    protected long totalMillis = 0 ;
    protected long totalTuples = 0 ;
    // Destination stream; may be wrapped with gzip in exec$().
    OutputStream output = System.out ;
    StreamRDF outputStream = null ;

    @Override
    protected void processModulesAndArgs() {
        cmdStrictMode = super.contains(strictDecl) ;
    }

    // Deferred action run after all input has been parsed (pretty printing).
    protected interface PostParseHandler { void postParse(); }

    /** Runs the command with strict mode applied, restoring it afterwards. */
    @Override
    protected void exec() {
        boolean oldStrictValue = SysRIOT.isStrictMode() ;
        if ( modLangParse.strictMode() )
            SysRIOT.setStrictMode(true) ;
        try { exec$() ; }
        finally { SysRIOT.setStrictMode(oldStrictValue) ; }
    }
    /** Body of exec(): set up the output chain, then parse each input. */
    protected void exec$() {
        // Optional RDFS inference over the parsed stream.
        if ( modLangParse.getRDFSVocab() != null )
            setup = new InferenceSetupRDFS(modLangParse.getRDFSVocab()) ;
        // --compress: gzip the output (syncFlush so flushes pass through).
        if ( modLangOutput.compressedOutput() ) {
            try { output = new GZIPOutputStream(output, true) ; }
            catch (IOException e) { IO.exception(e);}
        }
        outputStream = null ;
        PostParseHandler postParse = null ;
        // Prefer a streaming sink; fall back to accumulate-then-pretty-print.
        outputStream = createStreamSink() ;
        if ( outputStream == null ) {
            Pair<StreamRDF, PostParseHandler> p = createAccumulateSink() ;
            outputStream = p.getLeft() ;
            postParse = p.getRight();
        }
        try {
            // No positional arguments means read standard input.
            if ( super.getPositional().isEmpty() )
                parseFile("-");
            else {
                boolean b = super.getPositional().size() > 1;
                for ( String fn : super.getPositional() ) {
                    if ( b && !super.isQuiet() )
                        SysRIOT.getLogger().info("File: " + fn);
                    parseFile(fn);
                }
            }
            if ( postParse != null )
                postParse.postParse();
            if ( super.getPositional().size() > 1 && modTime.timingEnabled() )
                output("Total", totalTuples, totalMillis, langHandlerOverall) ;
        } finally {
            // Close only streams we created; never close System.out itself.
            if ( output != System.out )
                IO.close(output) ;
            else
                IO.flush(output);
            System.err.flush() ;
        }
    }
public void parseFile(String filename) {
TypedInputStream in = null ;
if ( filename.equals("-") ) {
in = new TypedInputStream(System.in) ;
parseFile("http://base/", "stdin", in) ;
} else {
try {
in = RDFDataMgr.open(filename) ;
} catch (Exception ex) {
System.err.println("Can't open '"+filename+"' "+ex.getMessage()) ;
return ;
}
parseFile(null, filename, in) ;
IO.close(in) ;
}
}
    /** Parse a named input; --base overrides {@code defaultBaseURI}. */
    public void parseFile(String defaultBaseURI, String filename, TypedInputStream in) {
        String baseURI = modLangParse.getBaseIRI() ;
        if ( baseURI == null )
            baseURI = defaultBaseURI ;
        parseRIOT(baseURI, filename, in) ;
    }

    /** Choose the language for this input; {@code dftLang} is the fallback. */
    protected abstract Lang selectLang(String filename, ContentType contentType, Lang dftLang ) ;
    /**
     * Parse one input stream: pick the language, configure checking and
     * error handling, stream into the output sink, and record/report
     * timing and tuple counts.
     */
    protected void parseRIOT(String baseURI, String filename, TypedInputStream in) {
        ContentType ct = in.getMediaType() ;
        baseURI = SysRIOT.chooseBaseIRI(baseURI, filename) ;
        // Checking defaults on; --check / --noCheck override it.
        boolean checking = true ;
        if ( modLangParse.explicitChecking() )  checking = true ;
        if ( modLangParse.explicitNoChecking() ) checking = false ;
        ErrorHandler errHandler = null ;
        if ( checking )
        {
            if ( modLangParse.stopOnBadTerm() )
                errHandler = ErrorHandlerFactory.errorHandlerStd ;
            else
                // Try to go on if possible.  This is the default behaviour.
                errHandler = ErrorHandlerFactory.errorHandlerWarn ;
        }
        if ( modLangParse.skipOnBadTerm() )
        {
            // TODO skipOnBadterm
        }
        Lang lang = selectLang(filename, ct, RDFLanguages.NQUADS) ;
        LangHandler handler = null ;
        if ( RDFLanguages.isQuads(lang) )
            handler = langHandlerQuads ;
        else if ( RDFLanguages.isTriples(lang) )
            handler = langHandlerTriples ;
        else
            throw new CmdException("Undefined language: "+lang) ;
        // If multiple files, choose the overall labels.
        if ( langHandlerOverall == null )
            langHandlerOverall = handler ;
        else
        {
            if ( langHandlerOverall != langHandlerAny )
            {
                if ( langHandlerOverall != handler )
                    langHandlerOverall = langHandlerAny ;
            }
        }
        // Make a flag.
        // Input and output subflags.
        // If input is "label, then output using NodeToLabel.createBNodeByLabelRaw() ;
        // else use NodeToLabel.createBNodeByLabel() ;
        // Also, as URI.
        final boolean labelsAsGiven = false ;
        NodeToLabel labels = SyntaxLabels.createNodeToLabel() ;
        if ( labelsAsGiven )
            labels = NodeToLabel.createBNodeByLabelEncoded() ;
        StreamRDF s = outputStream ;
        if ( setup != null )
            s = InfFactory.inf(s, setup) ;
        // Wrap the sink with a counter so triples/quads can be reported.
        StreamRDFCounting sink = StreamRDFLib.count(s) ;
        s = null ;
        ReaderRIOT reader = RDFDataMgr.createReader(lang) ;
        try {
            if ( checking ) {
                // N-Triples/N-Quads terms are concrete: no IRI resolution.
                if ( lang == RDFLanguages.NTRIPLES || lang == RDFLanguages.NQUADS )
                    reader.setParserProfile(RiotLib.profile(baseURI, false, true, errHandler)) ;
                else
                    reader.setParserProfile(RiotLib.profile(baseURI, true, true, errHandler)) ;
            } else
                reader.setParserProfile(RiotLib.profile(baseURI, false, false, errHandler)) ;
            if ( labelsAsGiven ) {
                FactoryRDF f = RiotLib.factoryRDF(LabelToNode.createUseLabelAsGiven()) ;
                reader.getParserProfile().setFactoryRDF(f);
            }
            modTime.startTimer() ;
            sink.start() ;
            reader.read(in, baseURI, ct, sink, null) ;
            sink.finish() ;
        } catch (RiotException ex) {
            // Should have handled the exception and logged a message by now.
            // System.err.println("++++"+ex.getMessage());
            if ( modLangParse.stopOnBadTerm() )
                return ;
        } finally {
            // Not close the output - we may write again to the underlying output stream in another call to parse a file.
            IO.close(in) ;
        }
        long x = modTime.endTimer() ;
        long n = sink.countTriples()+sink.countQuads() ;
        if ( modTime.timingEnabled() )
            output(filename, n, x, handler) ;
        totalMillis += x ;
        totalTuples += n ;
    }
    /** Create a streaming output sink if possible */
    protected StreamRDF createStreamSink() {
        // --sink: discard all output.
        if ( modLangParse.toBitBucket() )
            return StreamRDFLib.sinkNull() ;
        RDFFormat fmt = modLangOutput.getOutputStreamFormat() ;
        // Null means no streamable format was requested; the caller then
        // falls back to createAccumulateSink().
        if ( fmt == null )
            return null ;
        return StreamRDFWriter.getWriterStream(output, fmt) ;
    }
/** Create an accumulating output stream for later pretty printing */
protected Pair<StreamRDF, PostParseHandler> createAccumulateSink() {
final DatasetGraph dsg = DatasetGraphFactory.create() ;
StreamRDF sink = StreamRDFLib.dataset(dsg) ;
final RDFFormat fmt = modLangOutput.getOutputFormatted() ;
PostParseHandler handler = new PostParseHandler() {
@Override
public void postParse() {
// Try as dataset, then as graph.
WriterDatasetRIOTFactory w = RDFWriterRegistry.getWriterDatasetFactory(fmt) ;
if ( w != null ) {
RDFDataMgr.write(output, dsg, fmt) ;
return ;
}
WriterGraphRIOTFactory wg = RDFWriterRegistry.getWriterGraphFactory(fmt) ;
if ( wg != null ) {
RDFDataMgr.write(System.out, dsg.getDefaultGraph(), fmt) ;
return ;
}
throw new InternalErrorException("failed to find the writer: "+fmt) ;
}
} ;
return Pair.create(sink, handler) ;
}
protected Tokenizer makeTokenizer(InputStream in) {
Tokenizer tokenizer = TokenizerFactory.makeTokenizerUTF8(in) ;
return tokenizer ;
}
protected void output(String label, long numberTriples, long timeMillis, LangHandler handler) {
double timeSec = timeMillis/1000.0 ;
System.out.flush() ;
System.err.printf("%s : %,5.2f sec %,d %s %,.2f %s\n",
label,
timeMillis/1000.0, numberTriples,
handler.getItemsName(),
timeSec == 0 ? 0.0 : numberTriples/timeSec,
handler.getRateName()) ;
}
protected void output(String label) {
System.err.printf("%s : \n", label) ;
}
}
| |
package com.demigodsrpg.demigods.greek.structure;
import com.demigodsrpg.demigods.engine.conversation.Administration;
import com.demigodsrpg.demigods.engine.data.TimedServerData;
import com.demigodsrpg.demigods.engine.deity.Deity;
import com.demigodsrpg.demigods.engine.entity.player.DemigodsCharacter;
import com.demigodsrpg.demigods.engine.entity.player.DemigodsPlayer;
import com.demigodsrpg.demigods.engine.schematic.Schematic;
import com.demigodsrpg.demigods.engine.schematic.Selection;
import com.demigodsrpg.demigods.engine.structure.DemigodsStructure;
import com.demigodsrpg.demigods.engine.structure.DemigodsStructureType;
import com.demigodsrpg.demigods.engine.util.Colors;
import com.demigodsrpg.demigods.engine.util.Configs;
import com.demigodsrpg.demigods.engine.util.Messages;
import com.demigodsrpg.demigods.engine.util.Zones;
import com.demigodsrpg.demigods.greek.language.English;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import org.bukkit.*;
import org.bukkit.block.Block;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.block.Action;
import org.bukkit.event.player.PlayerInteractEvent;
import org.bukkit.inventory.ItemStack;
import org.bukkit.material.MaterialData;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.TimeUnit;
/**
 * The Greek "Shrine" structure type: a small altar a player builds for their
 * deity. It can be sanctified by allies and corrupted by enemies, and acts as
 * a tribute location. Registered as an eager singleton (see {@link #inst()}).
 */
public class Shrine extends GreekStructureType {
    // Registry/display name for this structure type; also used in log lines below.
    private static final String name = "Shrine";
    // Chooses the design from the biome at the reference block:
    // nether-brick variant in HELL biomes, stone variant everywhere else.
    private static final Function<Location, GreekStructureType.Design> getDesign = new Function<Location, GreekStructureType.Design>() {
        @Override
        public GreekStructureType.Design apply(Location reference) {
            switch (reference.getBlock().getBiome()) {
                case HELL:
                    return ShrineDesign.NETHER;
                default:
                    return ShrineDesign.GENERAL;
            }
        }
    };
    // Creates the backing save data for a newly generated shrine with empty
    // sanctifier/corruptor maps (keys presumably player/character ids — confirm in engine).
    private static final Function<GreekStructureType.Design, DemigodsStructure> createNew = new Function<GreekStructureType.Design, DemigodsStructure>() {
        @Override
        public DemigodsStructure apply(GreekStructureType.Design design) {
            DemigodsStructure save = new DemigodsStructure();
            save.setSanctifiers(new HashMap<String, Long>());
            save.setCorruptors(new HashMap<String, Long>());
            return save;
        }
    };
    // Sanctify interaction: only allies of the shrine owner may sanctify.
    // Plays a purr sound plus a block-step particle colored by the character's deity.
    private static final DemigodsStructureType.InteractFunction<Boolean> sanctify = new DemigodsStructureType.InteractFunction<Boolean>() {
        @Override
        public Boolean apply(DemigodsStructure data, DemigodsCharacter character) {
            if (!character.alliedTo(DemigodsCharacter.get(data.getOwner()))) return false;
            Location location = data.getBukkitLocation();
            location.getWorld().playSound(location, Sound.CAT_PURREOW, 0.7F, 0.9F);
            MaterialData colorData = Colors.getMaterial(character.getDeity().getColor());
            location.getWorld().playEffect(location.clone().add(0, 1, 0), Effect.STEP_SOUND, colorData.getItemTypeId(), colorData.getData());
            return true;
        }
    };
    // Corrupt interaction: only non-allies may corrupt. Plays hurt/particle
    // effects and tells the corruptor how much sanctity remains.
    private static final DemigodsStructureType.InteractFunction<Boolean> corrupt = new DemigodsStructureType.InteractFunction<Boolean>() {
        @Override
        public Boolean apply(DemigodsStructure data, DemigodsCharacter character) {
            if (character.alliedTo(DemigodsCharacter.get(data.getOwner()))) return false;
            Location location = data.getBukkitLocation();
            location.getWorld().playSound(location, Sound.WITHER_HURT, 0.4F, 1.5F);
            // NOTE(review): unlike sanctify above, this uses the 3-arg playEffect
            // overload (block id only, no data byte) — confirm that is intended.
            location.getWorld().playEffect(location.clone().add(0, 1, 0), Effect.STEP_SOUND, Material.REDSTONE_BLOCK.getId());
            character.getBukkitOfflinePlayer().getPlayer().sendMessage(ChatColor.RED + "This shrine has " + (data.getSanctity() - data.getCorruption()) + " sanctity left!");
            return true;
        }
    };
    // Birth effect: lightning visuals at both the new shrine and its creator.
    private static final DemigodsStructureType.InteractFunction<Boolean> birth = new DemigodsStructureType.InteractFunction<Boolean>() {
        @Override
        public Boolean apply(DemigodsStructure data, DemigodsCharacter character) {
            Location location = data.getBukkitLocation();
            location.getWorld().strikeLightningEffect(location);
            location.getWorld().strikeLightningEffect(character.getLocation());
            return true;
        }
    };
    // Kill effect: sound plus explosion (setFire=false) and credits a kill to the character.
    private static final DemigodsStructureType.InteractFunction<Boolean> kill = new DemigodsStructureType.InteractFunction<Boolean>() {
        @Override
        public Boolean apply(DemigodsStructure data, DemigodsCharacter character) {
            Location location = data.getBukkitLocation();
            location.getWorld().playSound(location, Sound.WITHER_DEATH, 1F, 1.2F);
            location.getWorld().createExplosion(location, 2F, false);
            character.addKill();
            return true;
        }
    };
    // Behavior flags: removed with its owner, destroyed when broken, and
    // usable as a tribute location.
    private static final Set<DemigodsStructureType.Flag> flags = new HashSet<DemigodsStructureType.Flag>() {
        {
            add(DemigodsStructureType.Flag.DELETE_WITH_OWNER);
            add(DemigodsStructureType.Flag.DESTRUCT_ON_BREAK);
            add(DemigodsStructureType.Flag.TRIBUTE_LOCATION);
        }
    };
    // Handles shrine creation (right-click with a claim item on a valid hand-built
    // layout) and admin-wand removal (two wand clicks within 5 seconds).
    private static final Listener listener = new Listener() {
        @EventHandler(priority = EventPriority.HIGH)
        public void createAndRemove(PlayerInteractEvent event) {
            if (event.getClickedBlock() == null) return;
            if (Zones.inNoDemigodsZone(event.getPlayer().getLocation())) return;
            // Define variables
            Block clickedBlock = event.getClickedBlock();
            Location location = clickedBlock.getLocation();
            Player player = event.getPlayer();
            if (DemigodsPlayer.isImmortal(player)) {
                DemigodsCharacter character = DemigodsCharacter.of(player);
                // Creation requires: right-click, the deity allows shrines, the held
                // item is one of the deity's claim items, and the block layout is valid.
                if (event.getAction() == Action.RIGHT_CLICK_BLOCK && !character.getDeity().getFlags().contains(Deity.Flag.NO_SHRINE) && character.getDeity().getClaimItems().keySet().contains(event.getPlayer().getItemInHand().getType()) && Util.validBlockConfiguration(event.getClickedBlock())) {
                    try {
                        // Shrine created!
                        DemigodsStructure save = inst().createNew(true, null, location);
                        save.setOwner(character.getId());
                        inst().birth(save, character);
                        // Log the generation
                        Messages.info(com.demigodsrpg.demigods.engine.language.English.LOG_STRUCTURE_CREATED.getLine().replace("{structure}", name + " (" + character.getDeity() + ")").replace("{locX}", location.getX() + "").replace("{locY}", location.getY() + "").replace("{locZ}", location.getZ() + "").replace("{world}", location.getWorld().getName()).replace("{creator}", player.getName()));
                        // Consume item in hand
                        ItemStack item = player.getItemInHand();
                        if (item.getAmount() > 1) {
                            player.getItemInHand().setAmount(item.getAmount() - 1);
                        } else {
                            player.setItemInHand(new ItemStack(Material.AIR));
                        }
                        for (String string : English.NOTIFICATION_SHRINE_CREATED.getLines())
                            player.sendMessage(string.replace("{alliance}", character.getAlliance() + "s").replace("{deity}", character.getDeity().getName()));
                        event.setCancelled(true);
                    } catch (Exception errored) {
                        // Creation of shrine failed...
                        Messages.warning(errored.getMessage());
                    }
                }
            }
            // Admin removal: first wand click arms a 5-second confirmation timer;
            // a second click inside the window removes the shrine.
            if (Administration.Util.useWand(player) && DemigodsStructureType.Util.partOfStructureWithType(location, "Shrine")) {
                event.setCancelled(true);
                DemigodsStructure save = DemigodsStructureType.Util.getStructureRegional(location);
                DemigodsCharacter owner = DemigodsCharacter.get(save.getOwner());
                if (TimedServerData.exists(player.getName(), "destroy_shrine")) {
                    // Remove the Shrine
                    save.remove();
                    TimedServerData.remove(player.getName(), "destroy_shrine");
                    // Log the removal
                    Messages.info(com.demigodsrpg.demigods.engine.language.English.LOG_STRUCTURE_REMOVED.getLine().replace("{structure}", name + " (" + owner.getDeity() + ")").replace("{locX}", location.getX() + "").replace("{locY}", location.getY() + "").replace("{locZ}", location.getZ() + "").replace("{world}", location.getWorld().getName()).replace("{creator}", player.getName()));
                    // Tell the administrator
                    player.sendMessage(ChatColor.GREEN + English.ADMIN_WAND_REMOVE_SHRINE_COMPLETE.getLine());
                } else {
                    TimedServerData.saveTimed(player.getName(), "destroy_shrine", true, 5, TimeUnit.SECONDS);
                    player.sendMessage(ChatColor.RED + English.ADMIN_WAND_REMOVE_SHRINE.getLine());
                }
            }
        }
    };
    // Configured claim radius around a shrine.
    private static final int radius = Configs.getSettingInt("zones.shrine_radius");
    // Every player is allowed to interact with shrines.
    private static final Predicate<Player> allow = new Predicate<Player>() {
        @Override
        public boolean apply(Player player) {
            return true;
        }
    };
    // Base sanctity and its regeneration rate (units/interval defined by the engine).
    private static final float sanctity = 250F, sanctityRegen = 1F;
    // Overworld schematic: gold block atop an ender chest over stone brick,
    // ringed by stone stairs.
    private static final Schematic general = new Schematic("general", "_Alex", 2) {
        {
            // Create the main block
            add(new Selection(0, 1, 0, Material.GOLD_BLOCK));
            // Create the ender chest and the block below
            add(new Selection(0, 0, 0, Material.ENDER_CHEST));
            add(new Selection(0, -1, 0, Material.SMOOTH_BRICK));
            // Create the rest
            add(new Selection(-1, 0, 0, Material.SMOOTH_STAIRS));
            add(new Selection(1, 0, 0, Material.SMOOTH_STAIRS, (byte) 1));
            add(new Selection(0, 0, -1, Material.SMOOTH_STAIRS, (byte) 2));
            add(new Selection(0, 0, 1, Material.SMOOTH_STAIRS, (byte) 3));
        }
    };
    // Nether schematic: same layout built from nether-brick materials.
    private static final Schematic nether = new Schematic("nether", "HmmmQuestionMark", 2) {
        {
            // Create the main block
            add(new Selection(0, 1, 0, Material.GOLD_BLOCK));
            // Create the ender chest and the block below
            add(new Selection(0, 0, 0, Material.ENDER_CHEST));
            add(new Selection(0, -1, 0, Material.NETHER_BRICK));
            // Create the rest
            add(new Selection(-1, 0, 0, Material.NETHER_BRICK_STAIRS));
            add(new Selection(1, 0, 0, Material.NETHER_BRICK_STAIRS, (byte) 1));
            add(new Selection(0, 0, -1, Material.NETHER_BRICK_STAIRS, (byte) 2));
            add(new Selection(0, 0, 1, Material.NETHER_BRICK_STAIRS, (byte) 3));
        }
    };
    // Generation point value passed to the engine (semantics defined there).
    private static final int generationPoints = 1;
    // Private: all configuration above is handed to the engine supertype once,
    // for the single INST instance below.
    private Shrine() {
        super(name, ShrineDesign.values(), getDesign, createNew, sanctify, corrupt, birth, kill, flags, listener, radius, allow, sanctity, sanctityRegen, generationPoints);
    }
    /** The two shrine designs; both expose the gold block (0,1,0) as the clickable block. */
    public static enum ShrineDesign implements GreekStructureType.Design {
        GENERAL("general", general, new Selection(0, 1, 0)), NETHER("nether", nether, new Selection(0, 1, 0));
        private final String name;
        private final Schematic schematic;
        private final Selection clickableBlocks;
        private ShrineDesign(String name, Schematic schematic, Selection clickableBlocks) {
            this.name = name;
            this.schematic = schematic;
            this.clickableBlocks = clickableBlocks;
        }
        @Override
        public String getName() {
            return name;
        }
        @Override
        public Set<Location> getClickableBlocks(Location reference) {
            // Resolve the relative selection against the structure's reference location.
            return clickableBlocks.getBlockLocations(reference);
        }
        @Override
        public Schematic getSchematic(DemigodsStructure unused) {
            return schematic;
        }
    }
    public static class Util {
        /**
         * Checks the hand-built pre-shrine layout: a gold block whose four
         * cardinal neighbors are cobblestone and whose four diagonal neighbors
         * are non-solid.
         */
        public static boolean validBlockConfiguration(Block block) {
            if (!block.getType().equals(Material.GOLD_BLOCK)) return false;
            if (!block.getRelative(1, 0, 0).getType().equals(Material.COBBLESTONE)) return false;
            if (!block.getRelative(-1, 0, 0).getType().equals(Material.COBBLESTONE)) return false;
            if (!block.getRelative(0, 0, 1).getType().equals(Material.COBBLESTONE)) return false;
            return block.getRelative(0, 0, -1).getType().equals(Material.COBBLESTONE) && !block.getRelative(1, 0, 1).getType().isSolid() && !block.getRelative(1, 0, -1).getType().isSolid() && !block.getRelative(-1, 0, 1).getType().isSolid() && !block.getRelative(-1, 0, -1).getType().isSolid();
        }
    }
    // Eagerly constructed singleton instance of this structure type.
    private static final DemigodsStructureType INST = new Shrine();
    /** Returns the singleton Shrine structure type. */
    public static DemigodsStructureType inst() {
        return INST;
    }
}
| |
/**
* Black Duck JIRA Plugin
*
* Copyright (C) 2020 Synopsys, Inc.
* https://www.synopsys.com/
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.blackducksoftware.integration.jira.workflow.notification;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import com.synopsys.integration.blackduck.api.UriSingleResponse;
import com.synopsys.integration.blackduck.api.core.BlackDuckResponse;
import com.synopsys.integration.blackduck.api.generated.view.ComponentVersionView;
import com.synopsys.integration.blackduck.api.generated.view.ComponentView;
import com.synopsys.integration.blackduck.api.generated.view.IssueView;
import com.synopsys.integration.blackduck.api.generated.view.PolicyRuleView;
import com.synopsys.integration.blackduck.api.generated.view.ProjectVersionView;
import com.synopsys.integration.blackduck.api.generated.view.VersionBomComponentView;
import com.synopsys.integration.util.Stringable;
/**
 * Immutable detail record derived from a single Black Duck notification.
 * Carries the notification group, optional names and typed URI links for the
 * project version, component (version), policy rule, component issue and BOM
 * component involved, plus a composite key identifying this detail.
 */
public class NotificationContentDetail extends Stringable {
    private final String notificationGroup;
    // Composite key built from the group and hash codes of the present URIs; see createContentDetailKey().
    private final String contentDetailKey;
    private final Optional<String> projectName;
    private final Optional<String> projectVersionName;
    private final Optional<UriSingleResponse<ProjectVersionView>> projectVersion;
    private final Optional<String> componentName;
    private final Optional<UriSingleResponse<ComponentView>> component;
    private final Optional<String> componentVersionName;
    private final Optional<UriSingleResponse<ComponentVersionView>> componentVersion;
    private final Optional<String> policyName;
    private final Optional<UriSingleResponse<PolicyRuleView>> policy;
    private final Optional<String> componentVersionOriginName;
    private final Optional<UriSingleResponse<IssueView>> componentIssue;
    private final Optional<String> componentVersionOriginId;
    private final Optional<UriSingleResponse<VersionBomComponentView>> bomComponent;
    // Notification group identifiers; compared against notificationGroup in isVulnerability()/isBomEdit().
    public final static String CONTENT_KEY_GROUP_BOM_EDIT = "bom_edit";
    public final static String CONTENT_KEY_GROUP_LICENSE = "license";
    public final static String CONTENT_KEY_GROUP_POLICY = "policy";
    public final static String CONTENT_KEY_GROUP_VULNERABILITY = "vulnerability";
    public final static String CONTENT_KEY_SEPARATOR = "|";
    // @formatter:off
    /**
     * Factory wrapping the private constructor. Each URI string, when present,
     * is converted into a typed {@code UriSingleResponse} link.
     */
    public static NotificationContentDetail createDetail(
             final String notificationGroup
            ,final Optional<String> projectName
            ,final Optional<String> projectVersionName
            ,final Optional<String> projectVersionUri
            ,final Optional<String> componentName
            ,final Optional<String> componentUri
            ,final Optional<String> componentVersionName
            ,final Optional<String> componentVersionUri
            ,final Optional<String> policyName
            ,final Optional<String> policyUri
            ,final Optional<String> componentVersionOriginName
            ,final Optional<String> componentIssueUri
            ,final Optional<String> componentVersionOriginId
            ,final Optional<String> bomComponent
            ) {
        return new NotificationContentDetail(
                 notificationGroup
                ,projectName
                ,projectVersionName
                ,projectVersionUri
                ,componentName
                ,componentUri
                ,componentVersionName
                ,componentVersionUri
                ,policyName
                ,policyUri
                ,componentVersionOriginName
                ,componentIssueUri
                ,componentVersionOriginId
                ,bomComponent
        );
    }
    // @formatter:on
    // @formatter:off
    private NotificationContentDetail(
             final String notificationGroup
            ,final Optional<String> projectName
            ,final Optional<String> projectVersionName
            ,final Optional<String> projectVersion
            ,final Optional<String> componentName
            ,final Optional<String> component
            ,final Optional<String> componentVersionName
            ,final Optional<String> componentVersion
            ,final Optional<String> policyName
            ,final Optional<String> policy
            ,final Optional<String> componentVersionOriginName
            ,final Optional<String> componentIssue
            ,final Optional<String> componentVersionOriginId
            ,final Optional<String> bomComponent
            ) {
        this.notificationGroup = notificationGroup;
        this.projectName = projectName;
        this.projectVersionName = projectVersionName;
        this.projectVersion = createUriSingleResponse(projectVersion, ProjectVersionView.class);
        this.componentName = componentName;
        this.component = createUriSingleResponse(component, ComponentView.class);
        this.componentVersionName = componentVersionName;
        this.componentVersion = createUriSingleResponse(componentVersion, ComponentVersionView.class);
        this.policyName = policyName;
        this.policy = createUriSingleResponse(policy, PolicyRuleView.class);
        this.componentVersionOriginName = componentVersionOriginName;
        this.componentIssue = createUriSingleResponse(componentIssue, IssueView.class);
        this.componentVersionOriginId = componentVersionOriginId;
        this.bomComponent = createUriSingleResponse(bomComponent, VersionBomComponentView.class);
        // Must run after all URI fields above are assigned.
        contentDetailKey = createContentDetailKey();
    }
    // @formatter:on
    // Wraps a URI string (when present) into a typed single-response link.
    private <T extends BlackDuckResponse> Optional<UriSingleResponse<T>> createUriSingleResponse(final Optional<String> uri, final Class<T> responseClass) {
        if (uri.isPresent()) {
            return Optional.of(new UriSingleResponse<>(uri.get(), responseClass));
        }
        return Optional.empty();
    }
    /**
     * Builds the composite detail key: the notification group followed by the
     * hash codes of each present URI, joined with {@code CONTENT_KEY_SEPARATOR}.
     * Absent slots contribute an empty segment (two adjacent separators).
     */
    private String createContentDetailKey() {
        final StringBuilder keyBuilder = new StringBuilder();
        keyBuilder.append(notificationGroup);
        keyBuilder.append(CONTENT_KEY_SEPARATOR);
        if (projectVersion.isPresent()) {
            keyBuilder.append(projectVersion.get().getUri().hashCode());
        }
        keyBuilder.append(CONTENT_KEY_SEPARATOR);
        if (component.isPresent()) {
            keyBuilder.append(component.get().getUri().hashCode());
        }
        keyBuilder.append(CONTENT_KEY_SEPARATOR);
        if (componentVersion.isPresent()) {
            keyBuilder.append(componentVersion.get().getUri().hashCode());
        }
        keyBuilder.append(CONTENT_KEY_SEPARATOR);
        if (policy.isPresent()) {
            keyBuilder.append(policy.get().getUri().hashCode());
            // NOTE(review): unlike the other slots, this separator is appended
            // only when a policy is present, so keys with and without a policy
            // have different segment counts. Looks inconsistent — confirm before
            // changing, since existing stored keys depend on this exact format.
            keyBuilder.append(CONTENT_KEY_SEPARATOR);
        }
        if (bomComponent.isPresent()) {
            keyBuilder.append(bomComponent.get().getUri().hashCode());
        }
        keyBuilder.append(CONTENT_KEY_SEPARATOR);
        final String key = keyBuilder.toString();
        return key;
    }
    public boolean hasComponentVersion() {
        return componentVersion.isPresent();
    }
    // NOTE(review): despite the name, this only checks that a component link is
    // present — it does not exclude a component version being present as well.
    public boolean hasOnlyComponent() {
        return component.isPresent();
    }
    public boolean isPolicy() {
        return policy.isPresent();
    }
    public boolean isVulnerability() {
        return CONTENT_KEY_GROUP_VULNERABILITY.equals(notificationGroup);
    }
    public boolean isBomEdit() {
        return CONTENT_KEY_GROUP_BOM_EDIT.equals(notificationGroup);
    }
    // Collects the typed links that are present, in a fixed order; the issue and
    // BOM-component links are intentionally not included here.
    public List<UriSingleResponse<? extends BlackDuckResponse>> getPresentLinks() {
        final List<UriSingleResponse<? extends BlackDuckResponse>> presentLinks = new ArrayList<>();
        if (projectVersion.isPresent()) {
            presentLinks.add(projectVersion.get());
        }
        if (component.isPresent()) {
            presentLinks.add(component.get());
        }
        if (componentVersion.isPresent()) {
            presentLinks.add(componentVersion.get());
        }
        if (policy.isPresent()) {
            presentLinks.add(policy.get());
        }
        return presentLinks;
    }
    // Simple accessors.
    public String getNotificationGroup() {
        return notificationGroup;
    }
    public String getContentDetailKey() {
        return contentDetailKey;
    }
    public Optional<String> getProjectName() {
        return projectName;
    }
    public Optional<String> getProjectVersionName() {
        return projectVersionName;
    }
    public Optional<UriSingleResponse<ProjectVersionView>> getProjectVersion() {
        return projectVersion;
    }
    public Optional<String> getComponentName() {
        return componentName;
    }
    public Optional<UriSingleResponse<ComponentView>> getComponent() {
        return component;
    }
    public Optional<String> getComponentVersionName() {
        return componentVersionName;
    }
    public Optional<UriSingleResponse<ComponentVersionView>> getComponentVersion() {
        return componentVersion;
    }
    public Optional<String> getPolicyName() {
        return policyName;
    }
    public Optional<UriSingleResponse<PolicyRuleView>> getPolicy() {
        return policy;
    }
    public Optional<String> getComponentVersionOriginName() {
        return componentVersionOriginName;
    }
    public Optional<UriSingleResponse<IssueView>> getComponentIssue() {
        return componentIssue;
    }
    public Optional<String> getComponentVersionOriginId() {
        return componentVersionOriginId;
    }
    public Optional<UriSingleResponse<VersionBomComponentView>> getBomComponent() {
        return bomComponent;
    }
}
| |
/*
* JBoss, Home of Professional Open Source
* Copyright 2018, Red Hat, Inc., and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.weld.xml;
import static java.util.Collections.emptyList;
import static org.jboss.weld.bootstrap.spi.BeansXml.EMPTY_BEANS_XML;
import static org.jboss.weld.bootstrap.spi.Scanning.EMPTY_SCANNING;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.function.Consumer;
import java.util.function.Function;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.events.Attribute;
import javax.xml.stream.events.EndElement;
import javax.xml.stream.events.StartElement;
import javax.xml.stream.events.XMLEvent;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import org.jboss.weld.bootstrap.spi.BeanDiscoveryMode;
import org.jboss.weld.bootstrap.spi.BeansXml;
import org.jboss.weld.bootstrap.spi.ClassAvailableActivation;
import org.jboss.weld.bootstrap.spi.Filter;
import org.jboss.weld.bootstrap.spi.Metadata;
import org.jboss.weld.bootstrap.spi.SystemPropertyActivation;
import org.jboss.weld.logging.XmlLogger;
import org.jboss.weld.metadata.BeansXmlImpl;
import org.jboss.weld.metadata.ClassAvailableActivationImpl;
import org.jboss.weld.metadata.FilterImpl;
import org.jboss.weld.metadata.ScanningImpl;
import org.jboss.weld.metadata.SystemPropertyActivationImpl;
import org.jboss.weld.metadata.WeldFilterImpl;
import org.jboss.weld.util.collections.ImmutableSet;
/**
* Simple yet efficient parser for beans.xml. This class is not thread safe and instances cannot be reused.
*
* @author Martin Kouba
*/
public class BeansXmlStreamParser {
    // Namespace URIs under which the root <beans> element is recognized:
    // legacy Java EE (java.sun.com), Java EE 7+ (xmlns.jcp.org) and Jakarta EE.
    public static final String JAVAEE_LEGACY_URI = "http://java.sun.com/xml/ns/javaee";
    public static final String JAVAEE_URI = "http://xmlns.jcp.org/xml/ns/javaee";
    public static final String JAKARTAEE_URI = "https://jakarta.ee/xml/ns/jakartaee";
    public static final Set<String> JAVAEE_URIS = ImmutableSet.of(JAVAEE_LEGACY_URI, JAVAEE_URI, JAKARTAEE_URI);
    // Weld-proprietary namespace; <scan> content is accepted from any SCANNING_URIS namespace.
    public static final String WELD_URI = "http://jboss.org/schema/weld/beans";
    public static final Set<String> SCANNING_URIS = ImmutableSet.of(WELD_URI, JAVAEE_URI, JAVAEE_LEGACY_URI, JAKARTAEE_URI);
    // Attribute local names used while parsing.
    private static final String VERSION_ATTRIBUTE_NAME = "version";
    private static final String BEAN_DISCOVERY_MODE_ATTRIBUTE_NAME = "bean-discovery-mode";
    private static final String NAME_ATTRIBUTE_NAME = "name";
    private static final String VALUE_ATTRIBUTE_NAME = "value";
    private static final String PATTERN_ATTRIBUTE_NAME = "pattern";
    // Element local names used while parsing.
    private static final String IF_CLASS_AVAILABLE = "if-class-available";
    private static final String IF_CLASS_NOT_AVAILABLE = "if-class-not-available";
    private static final String IF_SYSTEM_PROPERTY = "if-system-property";
    private static final String CLASS = "class";
    private static final String STEREOTYPE = "stereotype";
    private static final String INCLUDE = "include";
    private static final String EXCLUDE = "exclude";
    private static final String TRIM = "trim";
    private static final String BEANS = "beans";
    private static final String ALTERNATIVES = "alternatives";
    private static final String INTERCEPTORS = "interceptors";
    private static final String DECORATORS = "decorators";
    private static final String SCAN = "scan";
    // Parse results. Each list stays null until its section is encountered; the
    // null-ness is also how duplicate sections are detected (see parse* methods).
    private List<Metadata<String>> enabledInterceptors = null;
    private List<Metadata<String>> enabledDecorators = null;
    private List<Metadata<String>> selectedAlternatives = null;
    private List<Metadata<String>> selectedAlternativeStereotypes = null;
    private List<Metadata<Filter>> includes = null;
    private List<Metadata<Filter>> excludes = null;
    // Defaults to ANNOTATED unless the bean-discovery-mode attribute overrides it.
    private BeanDiscoveryMode discoveryMode = BeanDiscoveryMode.ANNOTATED;
    // Value of the root element's "version" attribute, if any.
    private String version;
    // Set when a <trim> element is present.
    private boolean isTrimmed;
    // Location of the beans.xml being parsed; may be null (parse() then fails).
    private final URL beansXml;
    // Applied to attribute text before interpretation (see parseBeans).
    private final Function<String, String> interpolator;
    // Discovery mode reported for a zero-length (marker) beans.xml file.
    private final BeanDiscoveryMode emptyBeansXmlDiscoveryMode;
    /**
     * Creates a parser with identity interpolation and ANNOTATED as the
     * discovery mode for empty beans.xml files.
     *
     * @param beansXml location of the beans.xml to parse
     */
    public BeansXmlStreamParser(URL beansXml) {
        this(beansXml, Function.identity(), BeanDiscoveryMode.ANNOTATED);
    }
    /**
     * Creates a parser with ANNOTATED as the discovery mode for empty beans.xml files.
     *
     * @param beansXml location of the beans.xml to parse
     * @param interpolator applied to attribute text before interpretation
     */
    public BeansXmlStreamParser(URL beansXml, Function<String, String> interpolator) {
        this(beansXml, interpolator, BeanDiscoveryMode.ANNOTATED);
    }
    /**
     * Creates a parser with identity interpolation.
     *
     * @param beansXml location of the beans.xml to parse
     * @param emptyBeansXmlDiscoveryMode discovery mode reported for a zero-length beans.xml
     */
    public BeansXmlStreamParser(URL beansXml, BeanDiscoveryMode emptyBeansXmlDiscoveryMode) {
        this(beansXml, Function.identity(), emptyBeansXmlDiscoveryMode);
    }
    /**
     * @param beansXml location of the beans.xml to parse
     * @param interpolator applied to attribute text before interpretation
     * @param emptyBeansXmlDiscoveryMode discovery mode reported for a zero-length beans.xml
     */
    public BeansXmlStreamParser(URL beansXml, Function<String, String> interpolator, BeanDiscoveryMode emptyBeansXmlDiscoveryMode) {
        this.beansXml = beansXml;
        this.interpolator = interpolator;
        this.emptyBeansXmlDiscoveryMode = emptyBeansXmlDiscoveryMode;
    }
    /**
     * Parses the configured beans.xml and returns the resulting {@link BeansXml}
     * model. A zero-length file is treated as a marker file: it yields
     * {@code EMPTY_BEANS_XML} when the empty-file discovery mode is ANNOTATED,
     * otherwise an empty model carrying that mode. Load and parse failures are
     * rethrown via {@code XmlLogger} (exact exception types defined there).
     */
    @SuppressFBWarnings(value = "RCN_REDUNDANT_NULLCHECK_WOULD_HAVE_BEEN_A_NPE",
            justification = "False positive, see https://github.com/spotbugs/spotbugs/issues/259")
    public BeansXml parse() {
        if (beansXml == null) {
            throw XmlLogger.LOG.loadError("unknown", null);
        }
        try (InputStream in = beansXml.openStream()) {
            // NOTE(review): available() == 0 is the "empty marker file" test;
            // available() can under-report for some stream types — presumably
            // fine for the URL schemes in use here, confirm.
            if (in.available() == 0) {
                // The file is just acting as a marker file
                // if the legacy treatment is on, we use discovery mode as specified, otherwise we default to annotated mode
                if (emptyBeansXmlDiscoveryMode.equals(BeanDiscoveryMode.ANNOTATED)) {
                    return EMPTY_BEANS_XML;
                } else {
                    return new BeansXmlImpl(emptyList(), emptyList(), emptyList(), emptyList(), EMPTY_SCANNING,
                            null, emptyBeansXmlDiscoveryMode, null, false);
                }
            }
            XMLInputFactory factory = XMLInputFactory.newInstance();
            XMLEventReader reader = factory.createXMLEventReader(in);
            // Locate the root <beans> element in a recognized namespace; if none
            // is found, the default model below is returned.
            StartElement element = nextStartElement(reader, BEANS, JAVAEE_URIS);
            if (element != null) {
                parseBeans(element);
                // Dispatch each top-level section to its dedicated parser.
                while (reader.hasNext()) {
                    XMLEvent event = reader.nextEvent();
                    if (isEnd(event, BEANS)) {
                        break;
                    } else if (isStartElement(event, ALTERNATIVES)) {
                        parseAlternatives(reader, event);
                    } else if (isStartElement(event, INTERCEPTORS)) {
                        parseInterceptors(reader, event);
                    } else if (isStartElement(event, DECORATORS)) {
                        parseDecorators(reader, event);
                    } else if (isStartElement(event, SCAN, SCANNING_URIS)) {
                        parseScan(reader, event);
                    } else if (isStartElement(event, TRIM)) {
                        isTrimmed = true;
                    }
                }
            }
            // NOTE(review): skipped when an exception is thrown above; the
            // underlying stream is still closed by try-with-resources.
            reader.close();
        } catch (IOException e) {
            throw XmlLogger.LOG.loadError(beansXml, e);
        } catch (XMLStreamException e) {
            throw XmlLogger.LOG.parsingError(beansXml, e);
        }
        // Assemble the final model; null section lists become empty lists.
        return new BeansXmlImpl(orEmpty(selectedAlternatives), orEmpty(selectedAlternativeStereotypes), orEmpty(enabledDecorators),
                orEmpty(enabledInterceptors), new ScanningImpl(orEmpty(includes), orEmpty(excludes)), beansXml, discoveryMode, version, isTrimmed);
    }
private StartElement nextStartElement(XMLEventReader reader, String localName, Set<String> namespaces) throws XMLStreamException {
StartElement startElement = nextStartElement(reader);
if (startElement != null && localName.equals(startElement.getName().getLocalPart()) && isInNamespace(startElement.getName(), namespaces)) {
return startElement;
}
return null;
}
private StartElement nextStartElement(XMLEventReader reader) throws XMLStreamException {
while (reader.hasNext()) {
XMLEvent event = reader.nextEvent();
if (event.isStartElement()) {
return event.asStartElement();
}
}
return null;
}
@SuppressWarnings("rawtypes")
private void parseBeans(StartElement element) {
Iterator attributes = element.getAttributes();
while (attributes.hasNext()) {
Attribute attribute = (Attribute) attributes.next();
if (isLocalName(attribute.getName(), VERSION_ATTRIBUTE_NAME)) {
version = attribute.getValue();
} else if (isLocalName(attribute.getName(), BEAN_DISCOVERY_MODE_ATTRIBUTE_NAME)) {
discoveryMode = parseDiscoveryMode(interpolate(attribute.getValue()).trim().toUpperCase());
}
}
}
private boolean isLocalName(QName name, String value) {
Objects.requireNonNull(name);
Objects.requireNonNull(value);
return value.equals(name.getLocalPart());
}
private void parseAlternatives(XMLEventReader reader, XMLEvent event) throws XMLStreamException {
if (selectedAlternatives != null) {
throw XmlLogger.LOG.multipleAlternatives(beansXml + "@" + event.asStartElement().getLocation().getLineNumber());
}
selectedAlternatives = new LinkedList<>();
selectedAlternativeStereotypes = new LinkedList<>();
while (reader.hasNext()) {
event = reader.nextEvent();
if (isEnd(event, ALTERNATIVES)) {
return;
} else if (event.isStartElement()) {
StartElement element = (StartElement) event;
if (isStartElement(element, CLASS)) {
selectedAlternatives.add(new XmlMetadata<String>(element.getName().toString(), getTrimmedElementText(reader), beansXml,
element.getLocation().getLineNumber()));
} else if (isStartElement(element, STEREOTYPE)) {
selectedAlternativeStereotypes.add(new XmlMetadata<String>(element.getName().toString(), getTrimmedElementText(reader), beansXml,
element.getLocation().getLineNumber()));
}
}
}
}
private void parseInterceptors(XMLEventReader reader, XMLEvent event) throws XMLStreamException {
if (enabledInterceptors != null) {
throw XmlLogger.LOG.multipleInterceptors(beansXml + "@" + event.asStartElement().getLocation().getLineNumber());
}
enabledInterceptors = new LinkedList<>();
while (reader.hasNext()) {
event = reader.nextEvent();
if (isEnd(event, INTERCEPTORS)) {
return;
} else if (event.isStartElement()) {
StartElement element = event.asStartElement();
if (isStartElement(element, CLASS)) {
enabledInterceptors.add(new XmlMetadata<String>(element.getName().toString(), getTrimmedElementText(reader), beansXml,
element.getLocation().getLineNumber()));
}
}
}
}
private void parseDecorators(XMLEventReader reader, XMLEvent event) throws XMLStreamException {
if (enabledDecorators != null) {
throw XmlLogger.LOG.multipleDecorators(beansXml + "@" + event.asStartElement().getLocation().getLineNumber());
}
enabledDecorators = new LinkedList<>();
while (reader.hasNext()) {
event = reader.nextEvent();
if (isEnd(event, DECORATORS)) {
return;
} else if (event.isStartElement()) {
StartElement element = event.asStartElement();
if (isStartElement(element, CLASS)) {
enabledDecorators.add(new XmlMetadata<String>(element.getName().toString(), getTrimmedElementText(reader), beansXml,
element.getLocation().getLineNumber()));
}
}
}
}
private void parseScan(XMLEventReader reader, XMLEvent event) throws XMLStreamException {
if (excludes != null) {
throw XmlLogger.LOG.multipleScanning(beansXml + "@" + event.asStartElement().getLocation().getLineNumber());
}
excludes = new LinkedList<>();
includes = new LinkedList<>();
while (reader.hasNext()) {
event = reader.nextEvent();
if (isEnd(event, SCAN, SCANNING_URIS)) {
return;
} else if (event.isStartElement()) {
StartElement element = (StartElement) event;
if (isStartElement(element, EXCLUDE, SCANNING_URIS)) {
handleFilter(element, reader, excludes::add);
} else if (isStartElement(element, INCLUDE, SCANNING_URIS)) {
handleFilter(element, reader, includes::add);
}
}
}
}
/**
 * Parses one <exclude>/<include> element into a Filter and passes it to the consumer.
 * A filter is identified either by a "name" attribute or, failing that, by a
 * "pattern" attribute; elements with neither are silently consumed by the caller's loop.
 * Nested activation conditions (<if-class-available>, <if-class-not-available>,
 * <if-system-property>) are accumulated until the closing tag is reached.
 */
private void handleFilter(StartElement filterElement, XMLEventReader reader, Consumer<XmlMetadata<Filter>> consumer) throws XMLStreamException {
String name = getAttribute(filterElement, NAME_ATTRIBUTE_NAME);
// "pattern" is only consulted when no "name" attribute is present
String pattern = name != null ? null : getAttribute(filterElement, PATTERN_ATTRIBUTE_NAME);
if (name != null || pattern != null) {
List<Metadata<SystemPropertyActivation>> systemPropertyActivations = new LinkedList<>();
List<Metadata<ClassAvailableActivation>> classAvailableActivations = new LinkedList<>();
while (reader.hasNext()) {
XMLEvent event = reader.nextEvent();
if (isEnd(event, EXCLUDE, SCANNING_URIS) || isEnd(event, INCLUDE, SCANNING_URIS)) {
Filter filter;
// pattern-based filters are a Weld extension, only valid in the Weld namespace
if (filterElement.getName().getNamespaceURI().equals(WELD_URI)) {
filter = new WeldFilterImpl(name, systemPropertyActivations, classAvailableActivations, pattern);
} else {
filter = new FilterImpl(name, systemPropertyActivations, classAvailableActivations);
}
consumer.accept(new XmlMetadata<Filter>(filterElement.getName().toString(), filter, beansXml, filterElement.getLocation().getLineNumber()));
return;
} else if (event.isStartElement()) {
StartElement element = (StartElement) event;
if (isStartElement(element, IF_CLASS_AVAILABLE, SCANNING_URIS)) {
classAvailable(element, classAvailableActivations::add, false);
} else if (isStartElement(element, IF_CLASS_NOT_AVAILABLE, SCANNING_URIS)) {
// inverse = true: activate only when the class is NOT on the classpath
classAvailable(element, classAvailableActivations::add, true);
} else if (isStartElement(element, IF_SYSTEM_PROPERTY, SCANNING_URIS)) {
systemProperty(element, systemPropertyActivations::add);
}
}
}
}
}
/**
 * Builds a ClassAvailableActivation condition from the element's "name"
 * attribute and hands it to the consumer; {@code inverse} selects the
 * "class NOT available" variant.
 */
private void classAvailable(StartElement element, Consumer<Metadata<ClassAvailableActivation>> consumer, boolean inverse) {
    String className = getAttribute(element, NAME_ATTRIBUTE_NAME);
    ClassAvailableActivationImpl activation = new ClassAvailableActivationImpl(className, inverse);
    consumer.accept(new XmlMetadata<ClassAvailableActivation>(element.getName().toString(), activation, beansXml, element.getLocation().getLineNumber()));
}
/**
 * Builds a SystemPropertyActivation condition from the element's "name" and
 * "value" attributes and hands it to the consumer.
 */
private void systemProperty(StartElement element, Consumer<Metadata<SystemPropertyActivation>> consumer) {
    String propertyName = getAttribute(element, NAME_ATTRIBUTE_NAME);
    String propertyValue = getAttribute(element, VALUE_ATTRIBUTE_NAME);
    SystemPropertyActivationImpl activation = new SystemPropertyActivationImpl(propertyName, propertyValue);
    consumer.accept(new XmlMetadata<SystemPropertyActivation>(element.getName().toString(), activation, beansXml, element.getLocation().getLineNumber()));
}
/**
 * Returns the interpolated, trimmed value of the named attribute of the given
 * element, or null when the attribute is absent.
 */
@SuppressWarnings("rawtypes")
private String getAttribute(StartElement element, String name) {
    for (Iterator it = element.getAttributes(); it.hasNext(); ) {
        Attribute candidate = (Attribute) it.next();
        if (candidate.getName().getLocalPart().equals(name)) {
            return interpolate(candidate.getValue().trim());
        }
    }
    return null;
}
/**
 * Returns true if the event is a start element with the given local name
 * in one of the given namespaces.
 */
private boolean isStartElement(XMLEvent event, String name, Set<String> namespaces) {
if (event.isStartElement()) {
StartElement element = event.asStartElement();
return isLocalName(element.getName(), name) && isInNamespace(element.getName(), namespaces);
}
return false;
}
// Convenience overload restricted to the standard Java EE namespaces.
private boolean isStartElement(XMLEvent event, String name) {
return isStartElement(event, name, JAVAEE_URIS);
}
// Convenience overload restricted to the standard Java EE namespaces.
private boolean isEnd(XMLEvent event, String name) {
return isEnd(event, name, JAVAEE_URIS);
}
/**
 * Returns true if the event is an end element with the given local name
 * in one of the given namespaces.
 */
private boolean isEnd(XMLEvent event, String name, Set<String> namespaces) {
if (event.isEndElement()) {
EndElement element = (EndElement) event;
return isLocalName(element.getName(), name) && isInNamespace(element.getName(), namespaces);
}
return false;
}
/**
 * Maps the textual bean-discovery-mode value to the matching enum constant.
 *
 * @throws IllegalStateException when the value matches no known mode
 */
private BeanDiscoveryMode parseDiscoveryMode(String value) {
    BeanDiscoveryMode[] modes = BeanDiscoveryMode.values();
    for (int i = 0; i < modes.length; i++) {
        if (modes[i].toString().equals(value)) {
            return modes[i];
        }
    }
    throw new IllegalStateException("Unknown bean discovery mode: " + value);
}
/** Null-safe helper: returns the given list, or an immutable empty list when null. */
private <T> List<T> orEmpty(List<T> list) {
    if (list != null) {
        return list;
    }
    return Collections.emptyList();
}
/**
 * Returns true if the qualified name belongs to one of the given namespace URIs.
 * A null URI set or an element with no namespace always matches.
 */
private boolean isInNamespace(QName name, Set<String> uris) {
    String uri = name.getNamespaceURI();
    return uris == null || uri.isEmpty() || uris.contains(uri);
}
// Reads the text content of the current element, trimmed and run through the interpolator.
private String getTrimmedElementText(XMLEventReader reader) throws XMLStreamException {
return interpolate(reader.getElementText().trim());
}
// Applies the configured interpolator function to the raw descriptor value.
protected String interpolate(String value) {
return interpolator.apply(value);
}
}
| |
package it.unibo.deis.lia.ramp.service.application;
import it.unibo.deis.lia.ramp.service.application.mpeg.*;
import it.unibo.deis.lia.ramp.service.management.*;
import it.unibo.deis.lia.ramp.core.e2e.*;
import it.unibo.deis.lia.ramp.*;
import it.unibo.deis.lia.ramp.util.*;
import java.io.*;
import java.net.*;
import java.util.*;
/**
*
* @author Carlo Giannelli
*/
public class BroadcastService extends Thread{
// clients must refresh their interest within this window (ms)
// NOTE(review): the original comment said "at least every 11s" but the constant is 6000 ms — confirm intended value
public static final int CLIENT_TIMEOUT = 6000;
// simple program name (without "SA=X" prefix) ==> service program handler
private final Hashtable<String,BroadcastServiceProgramHandler> programsDB = new Hashtable<String,BroadcastServiceProgramHandler>();
// quality related variables (per-receiver quality is clamped to [MIN_QUALITY, MAX_QUALITY])
public final static float MAX_QUALITY = 10.0F;
public final static float MIN_QUALITY = 0.0F;
//public final static float START_QUALITY = BroadcastService.MAX_QUALITY;
//public final static float START_QUALITY = (BroadcastService.MAX_QUALITY+BroadcastService.MIN_QUALITY)/2;
public final static float START_QUALITY = BroadcastService.MAX_QUALITY;
private final float MAX_DROP_RATE = 1.0F; //0.95F;
// sampling related variables: codec names passed verbatim to the VLC transcode chain
String videoTranscoder = "no transcode";
//String audioTranscoder = "mpga"; // MPEG 1 Layer 1
String audioTranscoder = "mp3"; // MPEG 1 Layer 3
//String audioTranscoder = "mp4a"; // MPEG-4 Audio (is AAC)
//String audioTranscoder = "a52"; // Vorbis
// http://wiki.videolan.org/Codec
// http://developer.android.com/guide/appendix/media-formats.html
private int videoBitrate = 768; // Kbit/s
private float videoScale = 1.0F; //0.5F; // ratio
private int audioBirate = 64; // Kbit/s (NOTE: "Birate" misspelling kept — getters/setters expose it)
private int mtu = 1024*5; // bytes
private boolean rawBytes = false; // true => send raw RTP bytes instead of serialized RTP objects
// ffmpeg parameter
private int gopSize = 4; // keyint (GOP size)
private String repositoryDirectory="./temp"; // where source media files are looked up
private String vlcDirectory; // platform-specific VLC install dir, set by the constructor
private String vlc; // VLC launcher binary name
private String webcam; // display name of the capture device
private String param = "dshow"; // VLC capture module (dshow/v4l2), set per-OS // Fabio Pascucci
private BroadcastSplitter broadcastSplitter;
private boolean smartSplitter;
private boolean open = true; // cleared by stopService() to end the accept loop
private final BoundReceiveSocket broadcastServiceSocket;
private int protocol = E2EComm.UDP;
//private int protocol = E2EComm.TCP;
// lazily-created singleton and its GUI frame
private static BroadcastService broadcastService=null;
private static BroadcastServiceJFrame bsjf;
/**
 * Returns the singleton BroadcastService, creating and starting it on first
 * use, and makes the service GUI visible.
 *
 * @return the singleton instance, or null if construction failed
 */
public static synchronized BroadcastService getInstance(){
    try {
        if(broadcastService==null){
            broadcastService = new BroadcastService();
            broadcastService.start();
        }
        // bsjf is only assigned at the end of the constructor, so it can still be
        // null here after a failed construction; guard like stopService() does.
        if(bsjf != null){
            bsjf.setVisible(true);
        }
    }
    catch (Exception e) {
        e.printStackTrace();
    }
    return broadcastService;
}
/**
 * Returns the singleton BroadcastService, creating and starting it on first
 * use, without opening the GUI frame.
 *
 * @return the singleton instance, or null if construction failed
 */
public static synchronized BroadcastService getInstanceNoShow(){
    try {
        if(broadcastService == null){
            BroadcastService created = new BroadcastService();
            broadcastService = created;
            created.start();
        }
    }
    catch (Exception e) {
        e.printStackTrace();
    }
    return broadcastService;
}
/**
 * Binds the service socket, registers the "Broadcast" service and resolves
 * the platform-specific VLC launcher, install directory, webcam name and
 * capture module, then builds the service GUI frame.
 *
 * @throws Exception if the socket cannot be bound or service registration fails
 */
private BroadcastService() throws Exception{
    broadcastServiceSocket = E2EComm.bindPreReceive(protocol);
    registerService();
    vlc = "vlc";
    if(RampEntryPoint.os.startsWith("linux")){
        vlcDirectory="";
        //webcam="/dev/video0";
        webcam="default webcam";
        param = "v4l2";
        BufferedReader brId = null;
        try{
            // when running as root (uid 0) plain "vlc" refuses to start: use vlc-wrapper
            Process pId = Runtime.getRuntime().exec("id -ru");
            brId = new BufferedReader(new InputStreamReader(pId.getInputStream()));
            String lineId = brId.readLine();
            // readLine() may return null when the command produced no output;
            // "0".equals(...) is null-safe (the old lineId.equals("0") could NPE)
            if("0".equals(lineId)){
                vlc = "vlc-wrapper";
            }
        }
        catch(Exception e){
            e.printStackTrace();
        }
        finally{
            // close the reader (and the process output stream) to avoid a descriptor leak
            if(brId != null){
                try{
                    brId.close();
                }
                catch(IOException ioe){
                    ioe.printStackTrace();
                }
            }
        }
    }
    else if(RampEntryPoint.os.startsWith("mac")){
        vlcDirectory="/Applications/VLC.app/Contents/MacOS/";
        webcam="default webcam";
        param = "dshow"; // TODO is "dshow" correct on macOS? (DirectShow is Windows-only)
    }
    else if(RampEntryPoint.os.startsWith("windows")){
        //vlcDirectory="C:/Program Files/VideoLAN/VLC/";
        vlcDirectory="C:/VLC-1.1.12/";
        //webcam="Creative WebCam (WDM)";
        //webcam="Logitech QuickCam Communicate STX";
        //webcam="USB 2.0 UVC 0.3M Webcam";
        webcam = "default webcam";
        param = "dshow";
    }
    else{
        // unknown platform: leave placeholders; streaming will not work until configured
        vlcDirectory="???";
        webcam="???";
        param = "???";
    }
    bsjf = new BroadcastServiceJFrame(this);
}
// Returns the shared service GUI frame; null until the singleton has been constructed.
public static BroadcastServiceJFrame getBsjf() {
return bsjf;
}
/**
 * (Re-)registers the "Broadcast" service with the ServiceManager, advertising
 * the current splitter configuration in the QoS string.
 */
private void registerService(){
    StringBuilder qos = new StringBuilder();
    if(broadcastSplitter == null){
        qos.append("broadcastSplitter=false");
    }
    else{
        qos.append("broadcastSplitter=true");
        qos.append(" ");
        qos.append("smartBroadcastSplitter=").append(this.isSmartSplitter());
    }
    ServiceManager.getInstance(false).registerService(
            "Broadcast",
            broadcastServiceSocket.getLocalPort(),
            protocol,
            qos.toString()
    );
}
/**
 * Activates the BroadcastSplitter (no-op if already active) and re-registers
 * the service so the advertised QoS reflects the new state.
 */
synchronized public void startBroadcastSplitter(){
    System.out.println("BroadcastService startBroadcastSplitter");
    if(broadcastSplitter != null){
        return;
    }
    broadcastSplitter = BroadcastSplitter.getInstance(this);
    registerService();
}
/**
 * Deactivates the BroadcastSplitter (no-op if not active) and re-registers
 * the service so the advertised QoS reflects the new state.
 */
synchronized public void stopBroadcastSplitter(){
    System.out.println("BroadcastService stopBroadcastSplitter");
    if(broadcastSplitter == null){
        return;
    }
    broadcastSplitter.stopBroadcastSplitter();
    broadcastSplitter = null;
    registerService();
}
/*public int getProtocol() {
return protocol;
}
public void setProtocol(int protocol) {
System.out.println("BroadcastService setProtocol "+protocol);
if( protocol==E2EComm.UDP || protocol==E2EComm.TCP ){
if( this.protocol!=protocol ){
this.protocol = protocol;
try {
broadcastServiceSocket.close();
} catch (IOException ex) {
ex.printStackTrace();
}
this.stopBroadcastSplitter();
String[] programList = getProgramList();
for(int i=0; i<programList.length; i++){
BroadcastServiceProgramHandler ph = programsDB.get(programList[i]);
ph.stopProgramHandler();
}
ServiceManager.getInstance(false).removeService("Broadcast");
BroadcastService bs = new BroadcastService();
bs.start();
}
}
}*/
// Whether RTP payloads are sent as raw bytes (true) or serialized RTP objects (false).
public boolean isRawBytes() {
return rawBytes;
}
public void setRawBytes(boolean rawBytes) {
this.rawBytes = rawBytes;
}
// Whether the "smart splitter" mode is advertised in the service QoS string.
public boolean isSmartSplitter(){
return smartSplitter;
}
// Enables smart-splitter mode and re-registers the service.
// NOTE(review): declared "throws Exception" for the (now commented-out) splitter
// precondition check; currently it never throws.
synchronized public void enableSmartSplitter() throws Exception{
System.out.println("BroadcastService enableSmartSplitter");
//if( broadcastSplitter == null ){
// throw new Exception("Cannot enable SmartSplitter feature if BroadcastSplitter is not active");
//}
this.smartSplitter = true;
registerService();
}
// Disables smart-splitter mode and re-registers the service.
synchronized public void disableSmartSplitter() {
System.out.println("BroadcastService disableSmartSplitter");
this.smartSplitter = false;
registerService();
}
/**
 * Opens the configuration frame of the named program; does nothing when the
 * program is unknown.
 */
public void showProgramConfig(String simpleProgramName){
    BroadcastServiceProgramHandler handler = programsDB.get(simpleProgramName);
    if(handler == null){
        return;
    }
    handler.showProgramConfig();
}
// Directory of the VLC installation (platform-specific, set by the constructor).
public String getVlcDirectory() {
//System.out.println("BroadcastService getVlcDirectory "+vlcDirectory);
return vlcDirectory;
}
// NOTE: a trailing "/" is always appended to the supplied path.
public void setVlcDirectory(String vlcDirectory) {
//System.out.println("BroadcastService setVlcDirectory "+vlcDirectory);
this.vlcDirectory = vlcDirectory+"/";
}
// Display name of the capture device ("default webcam" selects the system default).
public String getWebcam() {
//System.out.println("BroadcastService getWebcam: "+webcam);
return webcam;
}
public void setWebcam(String webcam) {
//System.out.println("BroadcastService setWebcam: "+webcam);
this.webcam = webcam;
}
// Directory where source media files are looked up (default "./temp").
public String getRepositoryDirectory(){
return repositoryDirectory;
}
public void setRepositoryDirectory(String repositoryDirectory){
this.repositoryDirectory = repositoryDirectory;
}
/** Returns the simple names of all currently handled programs. */
public String[] getProgramList(){
    Set<String> names = programsDB.keySet();
    return names.toArray(new String[0]);
}
/**
 * Lists the playable media files in the repository directory (hidden files
 * excluded), with the webcam device name appended as the last entry.
 *
 * @return file names plus the webcam entry; never null — if the repository
 *         directory does not exist or cannot be read, only the webcam entry
 *         is returned
 */
public String[] getSourceList(){
    File dir = new File(repositoryDirectory);
    // accept only known media extensions and skip files starting with '.'
    FilenameFilter filter = new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            return !(name.startsWith("."))
                    &&
                    (
                            name.endsWith(".mpg")
                            || name.endsWith(".mp3")
                            || name.endsWith(".mp4")
                            || name.endsWith(".avi")
                            || name.endsWith(".ts")
                            || name.endsWith(".ps")
                            || name.endsWith(".mkv")
                            || name.endsWith(".wmv")
                            || name.endsWith(".3gp")
                    );
        }
    };
    // single filtered listing (the old code did an extra unfiltered list() whose
    // result was discarded); list() returns null when the directory is missing
    // or unreadable — treat that as an empty repository instead of NPE-ing
    String[] list = dir.list(filter);
    if(list == null){
        list = new String[0];
    }
    String[] res = new String[list.length+1];
    System.arraycopy(list, 0, res, 0, list.length);
    res[res.length-1] = webcam;
    return res;
}
/**
 * Registers a program relayed through a BroadcastSplitter. If the program is
 * already handled, the incoming source replaces the current one only when it
 * traverses strictly fewer splitters; a locally-sourced program (splitterAmount
 * == 0) is never replaced.
 *
 * @param splitterAmount number of splitters the stream traverses (0 = local source)
 * @throws Exception if the program exists locally or with fewer/equal splitters
 */
public void addProgram(String simpleProgramName, AbstractRtpParser rtpMpegParser, byte splitterAmount, String[] serviceSource) throws Exception{
synchronized(programsDB){
// start streaming a traversing program (from BroadcastSplitter)
BroadcastServiceProgramHandler ph = programsDB.get(simpleProgramName);
if( ph != null ){
if(ph.getSplitterAmount()==0){
throw new Exception("Program already exists and is not splitter-based: "+ph.simpleProgramName);
}
else if(ph.getSplitterAmount()<=splitterAmount){
throw new Exception("Program already with fewer or the same splitters: "+ph.simpleProgramName);
}
else{
// the new source is either local or with less splitters:
// substitute the previous source with this new source
ph.splitterAmount = splitterAmount;
ph.serviceSource = serviceSource;
ph.rtpMpegParser = rtpMpegParser;
}
}
else{
// the source is new, i.e., it does not exist locally
ph = new BroadcastServiceProgramHandler(this, simpleProgramName, rtpMpegParser, splitterAmount, serviceSource);
ph.start();
programsDB.put(simpleProgramName, ph);
System.out.println("BroadcastService: addProgram "+simpleProgramName+" with RtpMpegParser ADDED");
}
}
}
/**
 * Starts streaming a local source (file, dvb, webcam...), replacing and
 * stopping any handler previously registered under the same name.
 *
 * @throws Exception if the handler (and its VLC pipeline) cannot be created
 */
public void addProgram(String simpleProgramName) throws Exception{
    System.out.println("BroadcastService: addProgram "+simpleProgramName);
    synchronized(programsDB){
        BroadcastServiceProgramHandler previous = programsDB.remove(simpleProgramName);
        if(previous != null){
            previous.stopProgramHandler();
        }
        BroadcastServiceProgramHandler handler = new BroadcastServiceProgramHandler(this, simpleProgramName);
        handler.start();
        programsDB.put(simpleProgramName, handler);
    }
}
/**
 * Stops streaming the given program. Splitter-relayed (listener-based)
 * programs cannot be stopped here and are left untouched.
 */
public void removeProgram(String simpleProgramName){
    synchronized(programsDB){
        BroadcastServiceProgramHandler handler = programsDB.get(simpleProgramName);
        if(handler == null){
            return;
        }
        if(handler.getSplitterAmount() != 0){
            System.out.println("BroadcastService.removeProgram: cannot stop listener-based program "+simpleProgramName);
            return;
        }
        handler.stopProgramHandler();
        programsDB.remove(simpleProgramName);
    }
}
/**
 * Shuts the service down: stops the splitter, ends the accept loop, hides the
 * GUI, stops every program handler, deregisters the service and clears the
 * singleton. The service socket itself is closed by the run() loop on exit.
 */
public void stopService(){
    this.stopBroadcastSplitter();
    open = false;
    if(bsjf != null){
        bsjf.setVisible(false);
    }
    for(String programName : getProgramList()){
        programsDB.get(programName).stopProgramHandler();
    }
    ServiceManager.getInstance(false).removeService("Broadcast");
    broadcastService = null;
}
// Video codec for the VLC transcode chain ("no transcode", "mp2v", "x264", ...).
public String getVideoTranscoder() {
return videoTranscoder;
}
public void setVideoTranscoder(String transcoder) {
System.out.println("BroadcastService setTranscoder "+transcoder);
this.videoTranscoder = transcoder;
}
// Audio bitrate in Kbit/s. NOTE: "Birate" misspelling kept for API compatibility.
public int getAudioBirate() {
return audioBirate;
}
public void setAudioBirate(int audioBirate) {
this.audioBirate = audioBirate;
}
// ffmpeg/x264 keyframe interval (GOP size).
public int getGopSize() {
return gopSize;
}
public void setGopSize(int gopSize) {
this.gopSize = gopSize;
}
// MTU passed to VLC via --mtu, in bytes.
public int getMtu() {
return mtu;
}
public void setMtu(int mtu) {
this.mtu = mtu;
}
// Video bitrate in Kbit/s.
public int getVideoBitrate() {
return videoBitrate;
}
public void setVideoBitrate(int videoBitrate) {
this.videoBitrate = videoBitrate;
}
// Video scaling ratio (1.0 = original size).
public float getVideoScale() {
return videoScale;
}
public void setVideoScale(float videoScale) {
this.videoScale = videoScale;
}
/**
 * Service accept loop: receives client packets on the service socket and
 * dispatches each one to a short-lived BroadcastServiceHandler thread.
 * Runs until stopService() clears the "open" flag, then closes the socket.
 */
@Override
public void run(){
System.out.println("BroadcastService START");
while(open){
try{
// 5 s receive timeout so the loop periodically re-checks "open"
GenericPacket gp = E2EComm.receive(broadcastServiceSocket, 5*1000);
//System.out.println("BroadcastService new request");
new BroadcastServiceHandler(gp).start();
}
catch(SocketTimeoutException ste){
// expected: no request within the timeout window
}
catch(Exception e){
//System.out.println("BroadcastService e = "+e.getMessage());
e.printStackTrace();
}
}
try {
broadcastServiceSocket.close();
} catch (Exception e) {
e.printStackTrace();
}
System.out.println("BroadcastService FINISHED");
}
/**
 * One-shot worker that interprets a single client packet. Supported requests
 * (BroadcastRequest payloads): "list" (send the program list), "stop"
 * (unregister a receiver), "ack" (keep-alive/quality ack), a numeric string
 * (quality delta), or null (plain registration refresh).
 */
private class BroadcastServiceHandler extends Thread{
private GenericPacket gp;
private BroadcastServiceHandler(GenericPacket gp){
this.gp=gp;
}
@Override
public void run(){
try{
// check packet type
if( gp instanceof UnicastPacket){
// check payload
UnicastPacket up=(UnicastPacket)gp;
Object payload = E2EComm.deserialize(up.getBytePayload());
if(payload instanceof BroadcastRequest){
//System.out.println("BroadcastService BroadcastRequest");
BroadcastRequest request = (BroadcastRequest)payload;
String simpleProgramName = request.getSimpleProgramName();
String message = request.getMessage();
// reply path: reverse of the path the request travelled
String[] newDest = E2EComm.ipReverse(up.getSource());
int newDestNodeId = up.getSourceNodeId();
if(message!=null && message.equals("list")){
// send broadcasting program list, each entry prefixed with its splitter amount
//System.out.println("BroadcastService list");
String[] list = getProgramList();
for(int i=0; i<list.length; i++){
list[i] = "SA="+programsDB.get(list[i]).getSplitterAmount()+" "+list[i];
}
E2EComm.sendUnicast(
newDest,
request.getClientPort(),
protocol,
E2EComm.serialize(list)
);
}
else{
// manage receiver request
EndPoint receiver = new EndPoint(
request.getClientPort(),
newDestNodeId,
newDest
);
//System.out.println("BroadcastService requested program="+simpleProgramName+" receiver="+receiver);
BroadcastServiceProgramHandler ph = programsDB.get(simpleProgramName);
if(ph!=null){
if(message!=null && message.equals("stop")){
// stop client
System.out.println("BroadcastService stop for "+simpleProgramName+" from "+receiver);
ph.removeReceiver(receiver);
}
else{
// any other message implies (re-)registration of the receiver
ph.addReceiver(receiver);
if(message==null){
// plain keep-alive refresh
//System.out.println("BroadcastService refresh for "+simpleProgramName+" from "+receiver);
}
else if(message.equals("ack")){
ph.receivedAck(receiver);
}
else{
// remaining case: message is a quality delta encoded as a float
try{
float deltaQuality = Float.parseFloat(message);
ph.changeQuality(receiver, deltaQuality);
}
catch(NumberFormatException nfe){
System.out.println("BroadcastService: unknown message for "+simpleProgramName+" from "+receiver+": message="+message );
}
}
}
}
else{
System.out.println("BroadcastService unknown requested program: "+simpleProgramName+" receiver="+receiver);
}
}
//System.out.println("BroadcastService FINISHED BroadcastRequest res "+res);
}
else{
// received payload is not BroadcastService: do nothing...
System.out.println("BroadcastService wrong payload: "+payload);
}
}
else{
// received packet is not UnicastPacket: do nothing...
System.out.println("BroadcastService wrong packet: "+gp.getClass().getName());
}
}
catch(Exception e){
e.printStackTrace();
}
}
}
/**
 * Per-program streaming thread: pulls RTP packets from the parser and relays
 * them to every registered receiver, applying per-receiver quality tailoring.
 */
protected class BroadcastServiceProgramHandler extends Thread{
//private Random r = new Random();
private boolean active = true; // cleared to end the relay loop
// per-client registration DB: receiver ==> last refresh time (ms)
private final Hashtable<EndPoint, Long> receiversDB;
// per-client required quality
private final Hashtable<EndPoint, BroadcastReceiverQuality> qualityDB;
// per-client last ack time (ms); staleness triggers a fast quality decrease
private final Hashtable<EndPoint, Long> ackDB;
// local fake client used when the stream is split but smart-splitting is off
private BroadcastClient.BroadcastClientProgramHandler fakeClient = null;
private String[] serviceSource = null;
private BroadcastService bs;
private AbstractRtpParser rtpMpegParser = null;
private String simpleProgramName;
// number of splitters the stream traverses; 0 means the source is local
private byte splitterAmount;
private BroadcastServiceProgramConfigJFrame programConfig;
private boolean qualityTailoring = true;
private boolean streamParsing = true;
// Whether outgoing RTP packets are tailored per-receiver before sending.
protected boolean isQualityTailoring() {
return qualityTailoring;
}
protected void setQualityTailoring(boolean qualityTailoring) {
System.out.println("BroadcastService setQualityTailoring: "+qualityTailoring);
this.qualityTailoring = qualityTailoring;
}
// Whether the RTP parser inspects packet payloads.
protected boolean isStreamParsing() {
return streamParsing;
}
// NOTE(review): the null guard on rtpMpegParser is commented out below, so calling
// this before a parser is assigned would NPE — confirm callers guarantee ordering.
protected void setStreamParsing(boolean streamParsing) {
System.out.println("BroadcastService setStreamParsing: "+streamParsing);
this.streamParsing = streamParsing;
//if( rtpMpegParser != null ){
rtpMpegParser.setParsePayload(this.isStreamParsing());
//}
System.out.println("BroadcastService setStreamParsing: rtpMpegParser.isParsePayload()="+rtpMpegParser.isParsePayload());
}
// Splitter count for this program (0 = local source).
protected byte getSplitterAmount() {
return splitterAmount;
}
// Opens this program's configuration frame.
protected void showProgramConfig(){
programConfig.setVisible(true);
}
// Records the time of the latest ack from the given receiver (keep-alive for quality control).
private void receivedAck(EndPoint receiver){
//System.out.println("BroadcastService receivedAck from "+receiver+" for "+simpleProgramName);
ackDB.put(receiver, System.currentTimeMillis());
}
/**
 * Handler for a splitter-relayed program: the RTP parser is supplied by the
 * caller instead of being backed by a local VLC process.
 */
private BroadcastServiceProgramHandler(BroadcastService bs, String simpleProgramName, AbstractRtpParser rtpMpegParser, byte splitterAmount, String[] serviceSource) throws Exception{
this.receiversDB = new Hashtable<EndPoint, Long>();
this.qualityDB = new Hashtable<EndPoint, BroadcastReceiverQuality>();
this.ackDB = new Hashtable<EndPoint, Long>();
this.bs = bs;
this.simpleProgramName = simpleProgramName;
this.rtpMpegParser = rtpMpegParser;
this.splitterAmount = splitterAmount;
this.programConfig = new BroadcastServiceProgramConfigJFrame(this);
this.serviceSource = serviceSource;
this.programConfig.setTitle("BroadcastService: "+simpleProgramName);
}
/**
 * Handler for a locally-sourced program: builds and launches a VLC pipeline
 * that transcodes the source (http URL, dvb-t channel, webcam or repository
 * file) and streams it as RTP/MPEG-TS to a local UDP port, from which the
 * RTP parser feeds the relay loop.
 *
 * @throws Exception if the VLC process or the parser cannot be started
 */
private BroadcastServiceProgramHandler(BroadcastService bs, String simpleProgramName) throws Exception{
this.receiversDB = new Hashtable<EndPoint, Long>();
this.qualityDB = new Hashtable<EndPoint, BroadcastReceiverQuality>();
this.ackDB = new Hashtable<EndPoint, Long>();
this.bs = bs;
this.simpleProgramName = simpleProgramName;
this.splitterAmount = 0;
this.programConfig = new BroadcastServiceProgramConfigJFrame(this);
this.programConfig.setTitle("BroadcastService: "+simpleProgramName);
// bind an ephemeral local UDP port for VLC to stream into
DatagramSocket dsVlc = new DatagramSocket();
int vlcPort = dsVlc.getLocalPort();
// create MPEG-TS-Parser
String mux = "ts";
// timeouts
int startingTimeout = 15; // seconds
int workingTimeout = 3; // seconds
// build the VLC ":sout=#transcode{...}" option for the selected video codec
String transcodeString = "";
if(videoTranscoder.equals("mp2v")){
transcodeString +=
//":sout=" +
"#transcode{"+
"vcodec=mp2v,vb="+videoBitrate+",fps=25,scale="+videoScale+"" +
",acodec="+audioTranscoder+",ab="+audioBirate+""+
",venc=ffmpeg{" +
"keyint="+gopSize +
//+"," + "hurry-up" +
"}"+
"}";
}
else if(videoTranscoder.equals("x264")){
// #transcode{vb=800,venc=x264{subme=5,ref=3,bframes=3,deblock=0:0},scale=1.0,vcodec=h264,acodec=mp3,ab=64,threads=4}
// http://www.veetle.com/index.php/article/view/x264
transcodeString +=
//":sout=" +
"#transcode{"+
"vcodec=h264,vb="+videoBitrate+",fps=25,scale="+videoScale+"" +
",acodec="+audioTranscoder+
//",acodec=mp3" +
",ab="+audioBirate+""+
/**/
",venc=x264{" +
//"bframes=1" + // (1) (comment)
"keyint="+gopSize+"" +
",idrint="+gopSize+"" +
//",profile=baseline" + // only I and P slices (un-comment)
//",profile=extended" + // I/P/B slices (comment)
",profile=high" + // I/P/B slices (comment)
/* // http://doom10.org/index.php?topic=733.0
+
"constrained-intra," +*/
",level=5.1" +
// http://www.veetle.com/index.php/article/view/x264
",subme=9" +
",ref=2" + // (1)
",me=hex" +
",merange=24" +
",subme=9" +
",threads=2" +
//"slice-max-mbs=2," + // (1?) NO!
//"slice-max-size=120," + // NO!!!
//"intra-refresh," + // NO!!
/*
"level=3.0," +
"nocabac," +
"qpmax=36," +
"qpmin=10," +
"me=hex," +
"merange=24," +
"subme=9," +
"qcomp=0.6" +
/**/
"}" +
/**/
"}";
}
else{
// no video transcoding: only re-encode the audio track
transcodeString +=
//":sout=" +
"#transcode{"+
"acodec="+audioTranscoder+
"}";
}
// assemble the full VLC command line depending on the source type
String[] comArray;
if(simpleProgramName.startsWith("http")){
// source from remote host: allow longer startup before declaring failure
startingTimeout = 45;
workingTimeout = 10;
String[] temp = {
vlcDirectory+vlc,
"http://"+simpleProgramName.split(" ")[1],
"--http-caching",""+500, // ms, default is 1200
"--mtu",""+mtu,
//"--ts-out-mtu",""+mtu,
//transcodeString +
"--sout","\""+transcodeString+
":duplicate{"+
// the following line is optional
//"dst=display,"+
// the following line is optional
//comArray[5]+="dst=std{access=file,mux=ts,dst="+repositoryDirectory+"/output_server.mpg},";
"dst=rtp{mux=ts,dst=127.0.0.1,port="+vlcPort+"}"+
"}"+"\""
//,"vlc://quit"
};
comArray = temp;
}
else if(simpleProgramName.startsWith("dvb-t")){
// source from a dvb-t device; program name carries frequency (kHz) and program id
String[] temp = {
vlcDirectory+vlc,
"dvb-t://",
":dvb-frequency="+simpleProgramName.split(" ")[1]+"000",
":program="+simpleProgramName.split(" ")[2],
"--mtu",""+mtu,
//"--ts-out-mtu",""+mtu,
//transcodeString +
"--sout","\""+transcodeString+
":duplicate{"+
// the following line is optional
//"dst=display,"+
// the following line is optional
//comArray[5]+="dst=std{access=file,mux=ts,dst="+repositoryDirectory+"/output_server.mpg},";
"dst=rtp{" +
"mux=ts,dst=127.0.0.1,port="+vlcPort+"" +
"}"+
"}"+"\"",
"vlc://quit"
};
comArray = temp;
}
else if(simpleProgramName.equals(webcam)){
// source from a webcam; an empty device name selects the system default
String webcamName = webcam;
if(webcam.equals("default webcam")){
webcamName="";
}
String[] temp = {
vlcDirectory+vlc,
""+param+"://", // "dshow://",
":"+param+"-vdev="+webcamName, // ":dshow-vdev="+webcamName,
":"+param+"-adev=", // ":dshow-adev=",
"--mtu",""+mtu,
//"--ts-out-mtu",""+mtu,
//transcodeString +
"--sout","\""+transcodeString+
":duplicate{"+
// the following line is optional
//comArray[5]+="dst=display,";
// the following line is optional
//comArray[5]+="dst=std{access=file,mux=ts,dst="+repositoryDirectory+"/output_server.mpg},";
",dst=rtp{mux=ts,dst=127.0.0.1,port="+vlcPort+"}"+
"}"+"\"",
"vlc://quit"
};
comArray = temp;
}
else{
// source from a file in the repository directory
String[] temp = {
vlcDirectory+vlc,
repositoryDirectory+"/"+simpleProgramName,
"--mtu",""+mtu,
//"--ts-out-mtu",""+mtu,
//transcodeString +
"--sout","\""+transcodeString+
":duplicate{"+
// the following line is optional
//comArray[2]+="dst=display,";
// the following line is optional
//comArray[2]+="dst=std{access=file,mux=ts,dst="+repositoryDirectory+"/output_server.mpg},";
"dst=rtp{" +
"mux=ts" + // XXX UNcomment this line
// "mux=mp4" + // XXX comment this line
",dst=127.0.0.1,port="+vlcPort+
//",name=nolescam.sdp", // XXX comment this line
//",sdp=rtsp://127.0.0.1:554/"+"ciao"+".sdp" + // XXX comment this line
"}"+
"}"+"\""
//,"vlc://quit"
};
comArray = temp;
if(videoTranscoder.equals("Andr")){ // TODO remove
// experimental Android-compatible H.264 profile
String[] temp2 = {
// TODO http://stackoverflow.com/questions/2947369/streaming-video-using-non-standard-protocols
// TODO http://forum.videolan.org/viewtopic.php?f=4&t=60335
// TODO http://forum.xda-developers.com/archive/index.php/t-527451.html
vlcDirectory+vlc,
repositoryDirectory+"/"+simpleProgramName,
"--mtu",""+mtu,
":sout=#transcode{" +
"vcodec=h264,venc=x264{" +
"no-cabac,level=12,vbv-maxrate=384,vbv-bufsize=1000,keyint=4,ref=3,bframes=0" +
"},width=320,height=180,acodec=mp3,ab=64,vb=384" +
"}" +
":" +
"rtp{" +
//"mux=ts," +
"dst=127.0.0.1" +
",port="+vlcPort+
// ",sdp=rtsp://127.0.0.1:"+vlcPort+"/stream.sdp"+
//",mp4a-latm" +
"}",
/*"--sout","'#transcode{" +
"soverlay,ab=42,samplerate=44100,channels=1,acodec=mp4a,vcodec=h264,width=320,height=180," +
"vfilter=\"canvas{width=320,height=180,aspect=16:9}\",fps=25,vb=200," +
"venc=x264{" +
"vbv-bufsize=500,partitions=all,level=12,no-cabac,subme=7,threads=4,ref=2,mixed-refs=1," +
"bframes=0,min-keyint=1,keyint=50,trellis=2,direct=auto,qcomp=0.0,qpmax=51" +
"}" +
"}" +
":gather:rtp{" +
"mp4a-latm," +
//"sdp=rtsp://127.0.0.1:"+vlcPort+"/"+this.simpleProgramName.split("[ )]")[0]+".sdp" +
//"sdp=rtsp://127.0.0.1:"+vlcPort+"/ramp.sdp" +
",dst=127.0.0.1,port="+vlcPort+
"}'",*/
"vlc://quit"
};
//",level=1.2" +
//",nocabac," +
//",bframes=0" +
comArray = temp2;
}
}
// log the assembled command, then launch the VLC process
System.out.print("BroadcastService comArray: ");
for(String s : comArray){
System.out.print(s+" ");
}
System.out.println();
List<String> comList = Arrays.asList(comArray);
ProcessBuilder pbVlcSever = new ProcessBuilder(comList);
pbVlcSever.start();
/*InputStream is;
UDPInputStream udpis = new UDPInputStream(dsVlc, startingTimeout, workingTimeout);
new Thread(udpis).start();
is = udpis;*/
// wire the local UDP socket to the appropriate RTP parser
if(videoTranscoder.equals("Andr")){ // TODO remove
// TODO RtpParser!!!
this.rtpMpegParser = new RtpParser(dsVlc, startingTimeout, workingTimeout);
}
else if(mux.equals("ts")){
UDPInputStream udpis = new UDPInputStream(dsVlc, startingTimeout, workingTimeout);
new Thread(udpis).start();
this.rtpMpegParser = new RtpMpegTsParser(udpis, startingTimeout, workingTimeout);
}
else{
throw new Exception("mux = "+mux);
}
this.rtpMpegParser.start();
}
// CSV log of per-receiver quality changes; only opened when logging is enabled.
private FileWriter broadcastServiceLogFileQuality = null;
/**
 * Relay loop: pulls RTP packets from the parser and forwards each one to every
 * registered receiver, dropping stale receivers, tailoring quality per
 * receiver, and (when logging) dumping tailoring-time statistics on exit.
 */
@Override
public void run(){
System.out.println("BroadcastService: ProgramHandler START "+simpleProgramName);
try{
if( RampEntryPoint.isLogging() ){
broadcastServiceLogFileQuality = new FileWriter("./temp/BroadcastServiceQuality.csv");
broadcastServiceLogFileQuality.write("current time (ms)" + "," + "receiver quality" + "," + "quality variation" + "\n");
broadcastServiceLogFileQuality.flush();
}
rtpMpegParser.setParsePayload(this.isStreamParsing());
while(active){
// 1) wait for RTP packets
RTP rtp = rtpMpegParser.getRtp();
if(rtp==null){
// empty packet signals end-of-stream to receivers (detected below)
rtp = new RTP(new byte[0]);
}
rtp.setSimpleProgramName(simpleProgramName);
//System.out.println("BroadcastServiceProgramHandler "+this.simpleProgramName+": TS packets "+rtp.getTsPackets().length+", bytes "+rtp.getBytes().length);
// 2) send this RTP packet to registered clients
EndPoint[] bcArray = receiversDB.keySet().toArray(new EndPoint[0]);
for(int i=0; i<bcArray.length; i++){
Long lastAck = ackDB.get(bcArray[i]);
if(lastAck!=null && System.currentTimeMillis()-lastAck > 3*1000){
// fast decrease: no ack for over 3 s, so cut quality sharply
System.out.println("BroadcastService.ProgramHandler FAST decrease "+bcArray[i]);
//this.changeQuality(bcArray[i], "decrease");
//this.changeQuality(bcArray[i], "decrease");
this.changeQuality(bcArray[i], -2.0F);
ackDB.put(bcArray[i],System.currentTimeMillis());
}
Long lastContact = receiversDB.get(bcArray[i]);
if( lastContact==null || System.currentTimeMillis() - lastContact > BroadcastService.CLIENT_TIMEOUT ){
// receiver did not refresh in time: drop it
//System.out.println("BroadcastService.ProgramHandler receiversDB.get(bcArray[i]) "+receiversDB.get(bcArray[i]));
System.out.println("BroadcastService.ProgramHandler REMOVING "+bcArray[i]);
removeReceiver(bcArray[i]);
}
else{
byte[] sendingRtpBytes;
if( rtp.getRtpHeader() == null ){
// end of stream: an empty payload acts as the closing packet
System.out.println("BroadcastService FINISHING "+simpleProgramName+": sending ClosingPacket to "+bcArray[i]);
sendingRtpBytes = new byte[0];
}
else{
RTP tailoredRtpObject;
if(this.isQualityTailoring()){
//int pre = rtp.getTsPackets().length;
tailoredRtpObject = qualityTailor(rtp, bcArray[i]);
//System.out.println("BroadcastService.ProgramHandler sendingRtpObject.getTsPackets().length from "+pre+" to "+((RTP)sendingRtpObject).getTsPackets().length);
}
else{
tailoredRtpObject = rtp;
}
if( isRawBytes() ){
sendingRtpBytes = tailoredRtpObject.getBytes();
}
else{
sendingRtpBytes = E2EComm.serialize(tailoredRtpObject);
}
}
//if(sendingRtpObject!=null){
E2EComm.sendUnicast(
bcArray[i].getAddress(),
bcArray[i].getPort(),
protocol,
sendingRtpBytes
);
//}
//else{
// System.out.println("BroadcastService.ProgramHandler sendingRtp NULL");
//}
}
}
// 3) check smart splitter
if(splitterAmount>0){
// split stream: drop the fake client once smart splitting is disabled
if( !isSmartSplitter() && fakeClient!=null ){
System.out.println("BroadcastService stopping FakeClient "+System.currentTimeMillis());
this.stopProgramHandler();
fakeClient.stopProgramHandler();
fakeClient = null;
}
}
if(rtp.getBytes()==null || rtp.getBytes().length==0){
throw new EndOfStreamException();
}
}
}
catch(EndOfStreamException eose){
//eose.printStackTrace();
System.out.println("BroadcastService EndOfStreamException "+simpleProgramName);
}
catch(Exception e){
e.printStackTrace();
}
/*EndPoint[] bcArray = receiversDB.keySet().toArray(new EndPoint[0]);
byte[] closingPacket = new byte[0];
for(int i=0; i<bcArray.length; i++){
try {
System.out.println("BroadcastService FINISHING "+simpleProgramName+": sending ClosingPacket to "+bcArray[i]);
E2EComm.sendUnicast(
bcArray[i].getAddress(),
bcArray[i].getClientPort(),
protocol,
closingPacket
);
} catch (Exception e) {
e.printStackTrace();
}
}*/
// cleanup: hide GUI, deregister the program, stop the parser and fake client
programConfig.setVisible(false);
bs.programsDB.remove(simpleProgramName);
rtpMpegParser.stopRtpMpegParser();
if(fakeClient!=null){
fakeClient.stopProgramHandler();
fakeClient = null;
}
try{
if( RampEntryPoint.isLogging() && broadcastServiceLogFileQuality!=null ){
broadcastServiceLogFileQuality.close();
broadcastServiceLogFileQuality = null;
}
}
catch(Exception e){
e.printStackTrace();
}
if( RampEntryPoint.isLogging() ){
// drop the first three (warm-up) samples, then report trimmed mean/stddev
// of tailoring times at the 95th and 90th percentiles
tailoringTimes.removeElementAt(0);
tailoringTimes.removeElementAt(0);
tailoringTimes.removeElementAt(0);
Collections.sort(tailoringTimes);
int maxValue = Math.round(tailoringTimes.size()*0.95F);
Long[] elapsedArray = tailoringTimes.toArray(new Long[0]);
float countMean = 0;
for(int i=0; i<maxValue; i++){
countMean += elapsedArray[i];
//System.out.println("BroadcastService tailoringTimes["+i+"] (ms): "+elapsedArray[i]/(1000.0F)/(1000.0F));
}
float mean = countMean/((float)maxValue);
//for(int i=maxValue; i<elapsedArray.length; i++){
// System.out.println("DISCARDING BroadcastService tailoringTimes["+i+"] (ms): "+elapsedArray[i]/(1000.0F)/(1000.0F));
//}
float countStddev = 0;
for(int i=0; i<maxValue; i++){
float x = elapsedArray[i] - mean;
countStddev += ( x * x );
}
float y = countStddev/((float)maxValue);
float stddev = (float)Math.sqrt(y);
System.out.println(
"95% BroadcastService tailoringTimes " +
"mean (ms): "+mean/(1000.0F)/(1000.0F)+" " +
"stddev (ms): "+stddev/(1000.0F)/(1000.0F)+" " +
"elapsedArray.length "+maxValue
);
maxValue = Math.round(tailoringTimes.size()*0.90F);
elapsedArray = tailoringTimes.toArray(new Long[0]);
countMean = 0;
for(int i=0; i<maxValue; i++){
countMean += elapsedArray[i];
//System.out.println("BroadcastService tailoringTimes["+i+"] (ms): "+elapsedArray[i]/(1000.0F)/(1000.0F));
}
mean = countMean/((float)maxValue);
//for(int i=maxValue; i<elapsedArray.length; i++){
// System.out.println("DISCARDING BroadcastService tailoringTimes["+i+"] (ms): "+elapsedArray[i]/(1000.0F)/(1000.0F));
//}
countStddev = 0;
for(int i=0; i<maxValue; i++){
float x = elapsedArray[i] - mean;
countStddev += ( x * x );
}
y = countStddev/((float)maxValue);
stddev = (float)Math.sqrt(y);
System.out.println(
"90% BroadcastService tailoringTimes " +
"mean (ms): "+mean/(1000.0F)/(1000.0F)+" " +
"stddev (ms): "+stddev/(1000.0F)/(1000.0F)+" " +
"elapsedArray.length "+maxValue
);
}
System.out.println("BroadcastService: ProgramHandler FINISHED "+simpleProgramName);
}
private void stopProgramHandler(){
System.out.println("BroadcastService: ProgramHandler.stopProgramHandler "+simpleProgramName+" "+System.currentTimeMillis());
rtpMpegParser.stopRtpMpegParser();
this.active = false;
//rtpMpegParser.
}
        /**
         * Registers a receiver for this program, or refreshes its keep-alive
         * timestamp if it is already known. For a brand-new receiver this also
         * seeds its ack timestamp and starting quality, and — when this node is
         * acting as a splitter with the smart-splitter option enabled — lazily
         * creates a local "fake client" that re-discovers the upstream source
         * of the program.
         *
         * @param receiver the endpoint to register or refresh
         */
        private synchronized void addReceiver(EndPoint receiver){
            //System.out.println("BroadcastService addReceiver "+receiver+" for "+this.simpleProgramName);
            //System.out.println("BroadcastService addReceiver pre: receiversDB "+receiversDB);
            if(receiversDB.containsKey(receiver)){
                // known receiver: just refresh its keep-alive timestamp
                receiversDB.put(receiver, System.currentTimeMillis());
            }
            else{
                // new receiver
                System.out.println("BroadcastService addReceiver: new receiver "+receiver+" for "+simpleProgramName+" "+System.currentTimeMillis());
                // NOTE: ack and quality entries are created BEFORE the receiversDB
                // entry, so any thread that sees the receiver also sees its state.
                ackDB.put(receiver,System.currentTimeMillis());
                qualityDB.put(receiver, new BroadcastReceiverQuality(Math.round(BroadcastService.START_QUALITY)));
                // add the receiver after quality and ack
                receiversDB.put(receiver, System.currentTimeMillis());
                /*System.out.println("BroadcastService addReceiver: " +
                        "splitterAmount="+splitterAmount + " " +
                        "bs.isSmartSplitter()="+bs.isSmartSplitter() + " " +
                        "receiversDB.size()="+receiversDB.size() + " " +
                        "fakeClient="+fakeClient);*/
                // if first client of a split program with smart splitter enabled
                // then create a fake local client with rediscover to the source of the program
                // NOTE(review): receiversDB.size()>0 is always true right after the
                // put above; fakeClient==null is what actually makes this run once.
                if(splitterAmount>0 && bs.isSmartSplitter() && receiversDB.size()>0 && fakeClient==null){
                    try{
                        System.out.println("BroadcastService CREATE fakeClient for "+simpleProgramName);
                        // look for the service sending this source
                        String[] serviceDest = E2EComm.ipReverse(serviceSource);
                        // 3-second discovery timeout for the upstream Broadcast service
                        ServiceResponse service = ServiceDiscovery.findService(serviceDest, "Broadcast", 3*1000);
                        System.out.println("BroadcastService fakeClient service: "+service);
                        // create the fake client
                        if(service!=null){
                            // the fake client propagates a splitter budget reduced by one
                            byte newSplitterAmount = (byte)(this.splitterAmount - 1);
                            this.fakeClient = BroadcastClient.getInstanceNoShow(true).new BroadcastClientProgramHandler(service, this.simpleProgramName, newSplitterAmount, true);
                            this.fakeClient.start();
                            System.out.println("BroadcastService fakeClient activated");
                        }
                        else{
                            System.out.println("BroadcastService fakeClient service not found");
                        }
                    }
                    catch(Exception e){
                        // best-effort: failing to create the fake client must not
                        // prevent the receiver registration above
                        e.printStackTrace();
                    }
                }
            }
        }
private synchronized void removeReceiver(EndPoint receiver){
System.out.println("BroadcastService REMOVING receiver "+receiver+" for "+simpleProgramName+" "+System.currentTimeMillis());
receiversDB.remove(receiver);
qualityDB.remove(receiver);
ackDB.remove(receiver);
// if removing the only client of a split program with smart splitter enabled,
// then disable the fake local client with rediscover to the source of the program
if(receiversDB.isEmpty() && fakeClient!=null){
// stop the fake client
fakeClient.stopProgramHandler();
fakeClient = null;
System.out.println("BroadcastService REMOVING fakeClient for "+simpleProgramName+" "+System.currentTimeMillis());
}
}
//private void changeQuality(EndPoint receiver, String quality){
        /**
         * Adjusts a receiver's quality level by {@code deltaQuality}, clamps it
         * to [MIN_QUALITY, MAX_QUALITY], optionally logs the change, and then
         * recomputes the receiver's frame-drop rates from the new level:
         * above the midpoint only non-self-decodable (P/B) frames are dropped;
         * below it every P/B frame is dropped plus a fraction of I frames.
         *
         * @param receiver     the endpoint whose quality is being changed
         * @param deltaQuality signed adjustment applied to the current quality
         */
        private void changeQuality(EndPoint receiver, float deltaQuality){
            //System.out.println("BroadcastService: ProgramHandler.changeQuality "+programName+" "+quality);
            //System.out.println("BroadcastService receiver="+receiver+" deltaQuality="+deltaQuality);
            //System.out.println("BroadcastService old quality level: "+qualityDB.get(receiver));
            BroadcastReceiverQuality receiverQuality = qualityDB.get(receiver);
            float currentQuality = receiverQuality.getCurrentQuality();
            // apply the delta, then clamp the stored value into the valid range
            receiverQuality.setCurrentQuality(currentQuality + deltaQuality);
            if( receiverQuality.getCurrentQuality() > MAX_QUALITY ){
                receiverQuality.setCurrentQuality(MAX_QUALITY);
            }
            else if( receiverQuality.getCurrentQuality() < MIN_QUALITY ){
                receiverQuality.setCurrentQuality(MIN_QUALITY);
            }
            try{
                // best-effort CSV logging: "timestamp,quality,delta"
                if( RampEntryPoint.isLogging() && broadcastServiceLogFileQuality!=null ){
                    /*int qualityInt = -1000; //
                    if(deltaQuality.equals("increase")){
                        qualityInt = +1;
                    }
                    else if(deltaQuality.equals("decrease")){
                        qualityInt = -1;
                    }*/
                    //broadcastServiceLogFileQuality.write(System.currentTimeMillis() + "," + qualityDB.get(receiver).getCurrentQuality() + "," + qualityInt + "\n");
                    broadcastServiceLogFileQuality.write(System.currentTimeMillis() + "," + qualityDB.get(receiver).getCurrentQuality() + "," + deltaQuality + "\n");
                    broadcastServiceLogFileQuality.flush();
                }
            }
            catch(Exception e){
                // logging failures must not affect the quality update itself
                e.printStackTrace();
            }
            // re-read the (possibly clamped) quality before deriving drop rates
            receiverQuality = qualityDB.get(receiver);
            currentQuality = receiverQuality.getCurrentQuality();
            float mean = (MAX_QUALITY+MIN_QUALITY)/2.0F;
            /*if( currentQuality == MAX_QUALITY ){
                // remove nothing
                receiverQuality.setPBDropRate(0.0F);
                receiverQuality.setIDropRate(0.0F);
            }
            else if( currentQuality == MIN_QUALITY ){
                // remove everything (but not PES header, PSI and audio)
                receiverQuality.setPBDropRate(MAX_DROP_RATE);
                receiverQuality.setIDropRate(MAX_DROP_RATE/10.0F*9.0F);
            }
            else */
            if( currentQuality >= mean ){
                // do not remove I frames
                // nsd rate scales linearly from MAX_DROP_RATE (at the midpoint)
                // down as quality approaches MAX_QUALITY, capped at 1.0
                float newNsdDropRate = MAX_DROP_RATE - MAX_DROP_RATE*(currentQuality-mean)/(MAX_QUALITY-mean);
                if( newNsdDropRate > 1.0F ){
                    newNsdDropRate = 1.0F;
                }
                receiverQuality.setNsdDropRate(newNsdDropRate);
                receiverQuality.setSdDropRate(0.0F);
            }
            else {//if(currentQuality<mean){
                // remove every P/B frame and some I frames
                // sd rate grows as quality drops below the midpoint, capped at 0.9
                // so some self-decodable frames always survive
                float newSdDropRate = MAX_DROP_RATE - MAX_DROP_RATE*(currentQuality-mean)/(MAX_QUALITY-mean);
                receiverQuality.setNsdDropRate(MAX_DROP_RATE);
                if( newSdDropRate > 0.9F ){
                    newSdDropRate = 0.9F;
                }
                receiverQuality.setSdDropRate(newSdDropRate);
            }
            //System.out.println("BroadcastService iDropRate="+receiverQuality.getIDropRate()+" pbDropRate="+receiverQuality.getPBDropRate());
            System.out.println("BroadcastService new quality level: "+qualityDB.get(receiver));
        }
        // Per-packet tailoring durations in nanoseconds, collected only while
        // logging is enabled; consumed at shutdown to report percentile stats.
        private Vector<Long> tailoringTimes = new Vector<Long>();
        // Position of the current frame within its GoP (reset on each I/IDR
        // frame); drives the progressive drop probability for h264 frames.
        private int gopFrame = 0;
        /**
         * Produces a quality-reduced copy of {@code rtp} for one receiver by
         * removing TS packets belonging to video frames selected for dropping.
         * Frame selection starts at each payload-unit-start packet and sticks
         * until the next one, so every packet of a dropped frame is removed.
         * The original {@code rtp} is never modified (a clone is tailored).
         *
         * @param rtp      the RTP packet to tailor (left untouched)
         * @param receiver the endpoint whose drop rates drive the tailoring
         * @return a clone of {@code rtp} with the selected TS packets removed
         * @throws Exception if cloning or packet manipulation fails
         */
        private RTP qualityTailor(final RTP rtp, EndPoint receiver) throws Exception{
            /**/long startTailoring = -1;
            if( RampEntryPoint.isLogging() ){
                startTailoring = System.nanoTime();
            }/**/
            RTP tailoredRTP = (RTP)(rtp.clone());
            BroadcastReceiverQuality receiverQuality = qualityDB.get(receiver);
            for(int i=0; i<tailoredRTP.getTsPackets().length; i++){
                TSPacket tsPacket = tailoredRTP.getTsPackets()[i];
                byte frameType = tsPacket.getFrameType();
                //System.out.println("BroadcastService frameType="+frameType+" pid="+tsPacket.getPid());
                // a payload-unit-start packet begins a new frame: decide here
                // whether this whole frame is kept or dropped
                if(tsPacket.isPayloadUnitStart()){
                    //System.out.println("BroadcastService isPayloadUnitStart "+tsPacket.getPid());
                    if( frameType==TSPacket.UNDEFINED ){
                        //System.out.println("BroadcastService.qualityTailor: null frameType???");
                    }
                    else if( frameType==TSPacket.AUDIO ){
                        // audio is never dropped; just remember its PID
                        receiverQuality.setCurrentAudioPid(tsPacket.getPid());
                    }
                    else if( TSPacket.isVideo(frameType) ){
                        // MPEG2 & h264
                        //System.out.println("BroadcastService.qualityTailor: frameType = "+frameType);
                        receiverQuality.setCurrentVideoPid(tsPacket.getPid());
                        receiverQuality.setDropCurrentVideoFrame(false);
                        float random = RampEntryPoint.nextRandomFloat();
                        if( frameType == TSPacket.I_FRAME // mpeg2
                            || frameType == TSPacket.IDR_FRAME ){ // h264 Instantaneous Decoder Refresh (IDR)
                            // self-decodable frame: dropped with probability sdDropRate
                            gopFrame = 0;
                            if( random < receiverQuality.getSdDropRate() ){
                                receiverQuality.setDropCurrentVideoFrame(true);
                            }
                        }
                        else if( frameType==TSPacket.P_FRAME
                            || frameType==TSPacket.B_FRAME ){ // mpeg2
                            // dependent frame: dropped with probability nsdDropRate
                            gopFrame++;
                            if( random < receiverQuality.getNsdDropRate() ){
                                receiverQuality.setDropCurrentVideoFrame(true);
                            }
                        }
                        else if( frameType==TSPacket.NON_IDR_FRAME ){ // h264
                            gopFrame++;
                            float gopProb = 1-((float)gopFrame/(float)(gopSize-1)); // # P frames = gopSize -1
                            //System.out.println("BroadcastService.qualityTailor: gopFrame="+gopFrame+" gopSize="+gopSize+" gopProb="+gopProb);
                            // start dropping frames from the last of the GoP
                            if( receiverQuality.getNsdDropRate() > gopProb ){
                                receiverQuality.setDropCurrentVideoFrame(true);
                            }
                        }
                    }
                } // END isPayloadUnitStart
                // apply the sticky per-frame decision to every packet of the frame
                if( tsPacket.getPid() == receiverQuality.getCurrentVideoPid() ){
                    if( receiverQuality.isDropCurrentVideoFrame() ){
                        //System.out.println("BroadcastService.qualityTailor: dropping TS Packet "+receiverQuality.getCurrentVideoPid());
                        // removal shifts packets left: step back so the next
                        // iteration re-examines index i
                        tailoredRTP.removeTsPacket(i);
                        i--;
                    }
                }
            } // end for
            /**/
            if( RampEntryPoint.isLogging() && startTailoring!=-1 ){
                long endTailoring = System.nanoTime();
                long elapsedTailoring = endTailoring - startTailoring;
                //System.out.println("BroadcastService elapsedTailoring: "+elapsedTailoring);
                tailoringTimes.addElement(elapsedTailoring);
            }
            /**/
            return tailoredRTP;
        }
}
}
| |
/*
* Copyright (C) 2011 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.cache;
import static com.google.common.base.Preconditions.checkArgument;
import com.google.common.annotations.GwtCompatible;
import com.google.common.base.MoreObjects;
import com.google.common.base.Objects;
import java.util.concurrent.Callable;
import org.checkerframework.checker.nullness.compatqual.NullableDecl;
/**
* Statistics about the performance of a {@link Cache}. Instances of this class are immutable.
*
* <p>Cache statistics are incremented according to the following rules:
*
* <ul>
* <li>When a cache lookup encounters an existing cache entry {@code hitCount} is incremented.
* <li>When a cache lookup first encounters a missing cache entry, a new entry is loaded.
* <ul>
* <li>After successfully loading an entry {@code missCount} and {@code loadSuccessCount}
* are incremented, and the total loading time, in nanoseconds, is added to {@code
* totalLoadTime}.
* <li>When an exception is thrown while loading an entry, {@code missCount} and {@code
* loadExceptionCount} are incremented, and the total loading time, in nanoseconds, is
* added to {@code totalLoadTime}.
* <li>Cache lookups that encounter a missing cache entry that is still loading will wait
* for loading to complete (whether successful or not) and then increment {@code
* missCount}.
* </ul>
* <li>When an entry is evicted from the cache, {@code evictionCount} is incremented.
* <li>No stats are modified when a cache entry is invalidated or manually removed.
* <li>No stats are modified by operations invoked on the {@linkplain Cache#asMap asMap} view of
* the cache.
* </ul>
*
* <p>A lookup is specifically defined as an invocation of one of the methods {@link
* LoadingCache#get(Object)}, {@link LoadingCache#getUnchecked(Object)}, {@link Cache#get(Object,
* Callable)}, or {@link LoadingCache#getAll(Iterable)}.
*
* @author Charles Fry
* @since 10.0
*/
@GwtCompatible
public final class CacheStats {
  private final long hitCount;
  private final long missCount;
  private final long loadSuccessCount;
  private final long loadExceptionCount;

  @SuppressWarnings("GoodTime") // should be a java.time.Duration
  private final long totalLoadTime;

  private final long evictionCount;

  /**
   * Creates an immutable snapshot holding the six cache counters.
   *
   * <p>This constructor is not intended for end users; the cache implementation populates it. All
   * arguments must be non-negative, since {@code CacheStats} cannot represent negative statistics.
   *
   * @throws IllegalArgumentException if any argument is negative
   */
  @SuppressWarnings("GoodTime") // should accept a java.time.Duration
  public CacheStats(
      long hitCount,
      long missCount,
      long loadSuccessCount,
      long loadExceptionCount,
      long totalLoadTime,
      long evictionCount) {
    requireNonNegative(hitCount);
    requireNonNegative(missCount);
    requireNonNegative(loadSuccessCount);
    requireNonNegative(loadExceptionCount);
    requireNonNegative(totalLoadTime);
    requireNonNegative(evictionCount);

    this.hitCount = hitCount;
    this.missCount = missCount;
    this.loadSuccessCount = loadSuccessCount;
    this.loadExceptionCount = loadExceptionCount;
    this.totalLoadTime = totalLoadTime;
    this.evictionCount = evictionCount;
  }

  /** Rejects negative counters with the same bare exception {@code checkArgument} would throw. */
  private static void requireNonNegative(long value) {
    if (value < 0) {
      throw new IllegalArgumentException();
    }
  }

  /**
   * Returns how many lookups the cache has served in total, cached or not; equal to {@code hitCount
   * + missCount}.
   */
  public long requestCount() {
    return hitCount + missCount;
  }

  /** Returns how many lookups were answered from an existing cache entry. */
  public long hitCount() {
    return hitCount;
  }

  /**
   * Returns the fraction of lookups that were hits, i.e. {@code hitCount / requestCount}. Defined
   * as {@code 1.0} when no requests have been made. {@code hitRate + missRate =~ 1.0}.
   */
  public double hitRate() {
    long requests = requestCount();
    if (requests == 0) {
      return 1.0;
    }
    return (double) hitCount / requests;
  }

  /**
   * Returns how many lookups required a value to be (possibly concurrently) loaded. Concurrent
   * lookups of the same absent key may each count as a miss while sharing one load.
   */
  public long missCount() {
    return missCount;
  }

  /**
   * Returns the fraction of lookups that were misses, i.e. {@code missCount / requestCount}.
   * Defined as {@code 0.0} when no requests have been made. Misses cover successful loads, failed
   * loads, and lookups that waited on another thread's load, so {@code missCount >=
   * loadSuccessCount + loadExceptionCount}.
   */
  public double missRate() {
    long requests = requestCount();
    if (requests == 0) {
      return 0.0;
    }
    return (double) missCount / requests;
  }

  /**
   * Returns how many load attempts the cache has made, successful or not; equal to {@code
   * loadSuccessCount + loadExceptionCount}.
   */
  public long loadCount() {
    return loadSuccessCount + loadExceptionCount;
  }

  /**
   * Returns how many load attempts completed successfully. Usually tracks {@code missCount}, but
   * may diverge from it on refreshes, multi-key loads, or {@code getIfPresent} calls.
   */
  public long loadSuccessCount() {
    return loadSuccessCount;
  }

  /**
   * Returns how many load attempts ended with an exception. Usually tracks {@code missCount}, but
   * may diverge from it on refreshes, multi-key loads, or {@code getIfPresent} calls.
   */
  public long loadExceptionCount() {
    return loadExceptionCount;
  }

  /**
   * Returns the fraction of load attempts that threw, i.e. {@code loadExceptionCount / (loadCount)}.
   * Defined as {@code 0.0} when no loads have been attempted.
   */
  public double loadExceptionRate() {
    long loads = loadSuccessCount + loadExceptionCount;
    if (loads == 0) {
      return 0.0;
    }
    return (double) loadExceptionCount / loads;
  }

  /**
   * Returns the cumulative time, in nanoseconds, spent loading values (both successes and
   * failures); useful for computing the miss penalty.
   */
  @SuppressWarnings("GoodTime") // should return a java.time.Duration
  public long totalLoadTime() {
    return totalLoadTime;
  }

  /** Returns the mean load time, {@code totalLoadTime / loadCount}, or {@code 0.0} for no loads. */
  public double averageLoadPenalty() {
    long loads = loadSuccessCount + loadExceptionCount;
    if (loads == 0) {
      return 0.0;
    }
    return (double) totalLoadTime / loads;
  }

  /** Returns how many entries were evicted; manual invalidations are not counted. */
  public long evictionCount() {
    return evictionCount;
  }

  /**
   * Returns the counter-wise difference {@code this - other}, with each negative result clamped to
   * zero (negative statistics are unrepresentable).
   */
  public CacheStats minus(CacheStats other) {
    long hits = Math.max(0, hitCount - other.hitCount);
    long misses = Math.max(0, missCount - other.missCount);
    long successes = Math.max(0, loadSuccessCount - other.loadSuccessCount);
    long failures = Math.max(0, loadExceptionCount - other.loadExceptionCount);
    long loadTime = Math.max(0, totalLoadTime - other.totalLoadTime);
    long evictions = Math.max(0, evictionCount - other.evictionCount);
    return new CacheStats(hits, misses, successes, failures, loadTime, evictions);
  }

  /**
   * Returns the counter-wise sum {@code this + other}.
   *
   * @since 11.0
   */
  public CacheStats plus(CacheStats other) {
    long hits = hitCount + other.hitCount;
    long misses = missCount + other.missCount;
    long successes = loadSuccessCount + other.loadSuccessCount;
    long failures = loadExceptionCount + other.loadExceptionCount;
    long loadTime = totalLoadTime + other.totalLoadTime;
    long evictions = evictionCount + other.evictionCount;
    return new CacheStats(hits, misses, successes, failures, loadTime, evictions);
  }

  @Override
  public int hashCode() {
    // Equivalent to Arrays.hashCode over the boxed counters (the same value
    // Objects.hashCode(Object...) produces): 31-based accumulation from 1.
    long[] counters = {
      hitCount, missCount, loadSuccessCount, loadExceptionCount, totalLoadTime, evictionCount
    };
    int result = 1;
    for (long counter : counters) {
      result = 31 * result + Long.hashCode(counter);
    }
    return result;
  }

  @Override
  public boolean equals(@NullableDecl Object object) {
    if (!(object instanceof CacheStats)) {
      return false;
    }
    CacheStats other = (CacheStats) object;
    return hitCount == other.hitCount
        && missCount == other.missCount
        && loadSuccessCount == other.loadSuccessCount
        && loadExceptionCount == other.loadExceptionCount
        && totalLoadTime == other.totalLoadTime
        && evictionCount == other.evictionCount;
  }

  @Override
  public String toString() {
    // Same "CacheStats{name=value, ...}" layout MoreObjects.toStringHelper emits.
    StringBuilder out = new StringBuilder("CacheStats{");
    out.append("hitCount=").append(hitCount);
    out.append(", missCount=").append(missCount);
    out.append(", loadSuccessCount=").append(loadSuccessCount);
    out.append(", loadExceptionCount=").append(loadExceptionCount);
    out.append(", totalLoadTime=").append(totalLoadTime);
    out.append(", evictionCount=").append(evictionCount);
    return out.append('}').toString();
  }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.costmanagement;
import com.azure.core.credential.TokenCredential;
import com.azure.core.http.HttpClient;
import com.azure.core.http.HttpPipeline;
import com.azure.core.http.HttpPipelineBuilder;
import com.azure.core.http.policy.AddDatePolicy;
import com.azure.core.http.policy.HttpLogOptions;
import com.azure.core.http.policy.HttpLoggingPolicy;
import com.azure.core.http.policy.HttpPipelinePolicy;
import com.azure.core.http.policy.HttpPolicyProviders;
import com.azure.core.http.policy.RequestIdPolicy;
import com.azure.core.http.policy.RetryPolicy;
import com.azure.core.http.policy.UserAgentPolicy;
import com.azure.core.management.http.policy.ArmChallengeAuthenticationPolicy;
import com.azure.core.management.profile.AzureProfile;
import com.azure.core.util.Configuration;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.costmanagement.fluent.CostManagementClient;
import com.azure.resourcemanager.costmanagement.implementation.AlertsImpl;
import com.azure.resourcemanager.costmanagement.implementation.CostManagementClientBuilder;
import com.azure.resourcemanager.costmanagement.implementation.DimensionsImpl;
import com.azure.resourcemanager.costmanagement.implementation.ExportsImpl;
import com.azure.resourcemanager.costmanagement.implementation.ForecastsImpl;
import com.azure.resourcemanager.costmanagement.implementation.GenerateReservationDetailsReportsImpl;
import com.azure.resourcemanager.costmanagement.implementation.OperationsImpl;
import com.azure.resourcemanager.costmanagement.implementation.QueriesImpl;
import com.azure.resourcemanager.costmanagement.implementation.SettingsImpl;
import com.azure.resourcemanager.costmanagement.implementation.ViewsImpl;
import com.azure.resourcemanager.costmanagement.models.Alerts;
import com.azure.resourcemanager.costmanagement.models.Dimensions;
import com.azure.resourcemanager.costmanagement.models.Exports;
import com.azure.resourcemanager.costmanagement.models.Forecasts;
import com.azure.resourcemanager.costmanagement.models.GenerateReservationDetailsReports;
import com.azure.resourcemanager.costmanagement.models.Operations;
import com.azure.resourcemanager.costmanagement.models.Queries;
import com.azure.resourcemanager.costmanagement.models.Settings;
import com.azure.resourcemanager.costmanagement.models.Views;
import java.time.Duration;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
/** Entry point to CostManagementManager. */
public final class CostManagementManager {
    private Settings settings;
    private Views views;
    private Alerts alerts;
    private Forecasts forecasts;
    private Dimensions dimensions;
    private Queries queries;
    private GenerateReservationDetailsReports generateReservationDetailsReports;
    private Operations operations;
    private Exports exports;
    private final CostManagementClient clientObject;

    private CostManagementManager(HttpPipeline httpPipeline, AzureProfile profile, Duration defaultPollInterval) {
        Objects.requireNonNull(httpPipeline, "'httpPipeline' cannot be null.");
        Objects.requireNonNull(profile, "'profile' cannot be null.");
        this.clientObject =
            new CostManagementClientBuilder()
                .pipeline(httpPipeline)
                .endpoint(profile.getEnvironment().getResourceManagerEndpoint())
                .defaultPollInterval(defaultPollInterval)
                .buildClient();
    }

    /**
     * Creates an instance of CostManagement service API entry point.
     *
     * @param credential the credential to use.
     * @param profile the Azure profile for client.
     * @return the CostManagement service API instance.
     */
    public static CostManagementManager authenticate(TokenCredential credential, AzureProfile profile) {
        Objects.requireNonNull(credential, "'credential' cannot be null.");
        Objects.requireNonNull(profile, "'profile' cannot be null.");
        return configure().authenticate(credential, profile);
    }

    /**
     * Gets a Configurable instance that can be used to create CostManagementManager with optional configuration.
     *
     * @return the Configurable instance allowing configurations.
     */
    public static Configurable configure() {
        return new CostManagementManager.Configurable();
    }

    /** The Configurable allowing configurations to be set. */
    public static final class Configurable {
        private final ClientLogger logger = new ClientLogger(Configurable.class);

        private HttpClient httpClient;
        private HttpLogOptions httpLogOptions;
        private final List<HttpPipelinePolicy> policies = new ArrayList<>();
        private final List<String> scopes = new ArrayList<>();
        private RetryPolicy retryPolicy;
        private Duration defaultPollInterval;

        private Configurable() {
        }

        /**
         * Sets the http client.
         *
         * @param httpClient the HTTP client.
         * @return the configurable object itself.
         */
        public Configurable withHttpClient(HttpClient httpClient) {
            this.httpClient = Objects.requireNonNull(httpClient, "'httpClient' cannot be null.");
            return this;
        }

        /**
         * Sets the logging options to the HTTP pipeline.
         *
         * @param httpLogOptions the HTTP log options.
         * @return the configurable object itself.
         */
        public Configurable withLogOptions(HttpLogOptions httpLogOptions) {
            this.httpLogOptions = Objects.requireNonNull(httpLogOptions, "'httpLogOptions' cannot be null.");
            return this;
        }

        /**
         * Adds the pipeline policy to the HTTP pipeline.
         *
         * @param policy the HTTP pipeline policy.
         * @return the configurable object itself.
         */
        public Configurable withPolicy(HttpPipelinePolicy policy) {
            this.policies.add(Objects.requireNonNull(policy, "'policy' cannot be null."));
            return this;
        }

        /**
         * Adds the scope to permission sets.
         *
         * @param scope the scope.
         * @return the configurable object itself.
         */
        public Configurable withScope(String scope) {
            this.scopes.add(Objects.requireNonNull(scope, "'scope' cannot be null."));
            return this;
        }

        /**
         * Sets the retry policy to the HTTP pipeline.
         *
         * @param retryPolicy the HTTP pipeline retry policy.
         * @return the configurable object itself.
         */
        public Configurable withRetryPolicy(RetryPolicy retryPolicy) {
            this.retryPolicy = Objects.requireNonNull(retryPolicy, "'retryPolicy' cannot be null.");
            return this;
        }

        /**
         * Sets the default poll interval, used when service does not provide "Retry-After" header.
         *
         * @param defaultPollInterval the default poll interval.
         * @return the configurable object itself.
         * @throws NullPointerException if {@code defaultPollInterval} is null.
         * @throws IllegalArgumentException if {@code defaultPollInterval} is negative.
         */
        public Configurable withDefaultPollInterval(Duration defaultPollInterval) {
            // FIX: error messages previously referenced 'retryPolicy' and 'httpPipeline'
            // (copy-paste); they now name the argument actually being validated.
            this.defaultPollInterval =
                Objects.requireNonNull(defaultPollInterval, "'defaultPollInterval' cannot be null.");
            if (this.defaultPollInterval.isNegative()) {
                throw logger
                    .logExceptionAsError(new IllegalArgumentException("'defaultPollInterval' cannot be negative"));
            }
            return this;
        }

        /**
         * Creates an instance of CostManagement service API entry point.
         *
         * @param credential the credential to use.
         * @param profile the Azure profile for client.
         * @return the CostManagement service API instance.
         */
        public CostManagementManager authenticate(TokenCredential credential, AzureProfile profile) {
            Objects.requireNonNull(credential, "'credential' cannot be null.");
            Objects.requireNonNull(profile, "'profile' cannot be null.");

            // Build the User-Agent header; platform details are appended unless
            // the AZURE_TELEMETRY_DISABLED opt-out is set.
            StringBuilder userAgentBuilder = new StringBuilder();
            userAgentBuilder
                .append("azsdk-java")
                .append("-")
                .append("com.azure.resourcemanager.costmanagement")
                .append("/")
                .append("1.0.0-beta.3");
            if (!Configuration.getGlobalConfiguration().get("AZURE_TELEMETRY_DISABLED", false)) {
                userAgentBuilder
                    .append(" (")
                    .append(Configuration.getGlobalConfiguration().get("java.version"))
                    .append("; ")
                    .append(Configuration.getGlobalConfiguration().get("os.name"))
                    .append("; ")
                    .append(Configuration.getGlobalConfiguration().get("os.version"))
                    .append("; auto-generated)");
            } else {
                userAgentBuilder.append(" (auto-generated)");
            }

            // Default to the ARM management endpoint scope when none was configured.
            if (scopes.isEmpty()) {
                scopes.add(profile.getEnvironment().getManagementEndpoint() + "/.default");
            }
            if (retryPolicy == null) {
                retryPolicy = new RetryPolicy("Retry-After", ChronoUnit.SECONDS);
            }

            // Assemble the pipeline; policy order matters: user agent and request id
            // first, retry before authentication, logging last.
            List<HttpPipelinePolicy> policies = new ArrayList<>();
            policies.add(new UserAgentPolicy(userAgentBuilder.toString()));
            policies.add(new RequestIdPolicy());
            HttpPolicyProviders.addBeforeRetryPolicies(policies);
            policies.add(retryPolicy);
            policies.add(new AddDatePolicy());
            policies.add(new ArmChallengeAuthenticationPolicy(credential, scopes.toArray(new String[0])));
            policies.addAll(this.policies);
            HttpPolicyProviders.addAfterRetryPolicies(policies);
            policies.add(new HttpLoggingPolicy(httpLogOptions));
            HttpPipeline httpPipeline =
                new HttpPipelineBuilder()
                    .httpClient(httpClient)
                    .policies(policies.toArray(new HttpPipelinePolicy[0]))
                    .build();
            return new CostManagementManager(httpPipeline, profile, defaultPollInterval);
        }
    }

    /** @return Resource collection API of Settings. */
    public Settings settings() {
        if (this.settings == null) {
            this.settings = new SettingsImpl(clientObject.getSettings(), this);
        }
        return settings;
    }

    /** @return Resource collection API of Views. */
    public Views views() {
        if (this.views == null) {
            this.views = new ViewsImpl(clientObject.getViews(), this);
        }
        return views;
    }

    /** @return Resource collection API of Alerts. */
    public Alerts alerts() {
        if (this.alerts == null) {
            this.alerts = new AlertsImpl(clientObject.getAlerts(), this);
        }
        return alerts;
    }

    /** @return Resource collection API of Forecasts. */
    public Forecasts forecasts() {
        if (this.forecasts == null) {
            this.forecasts = new ForecastsImpl(clientObject.getForecasts(), this);
        }
        return forecasts;
    }

    /** @return Resource collection API of Dimensions. */
    public Dimensions dimensions() {
        if (this.dimensions == null) {
            this.dimensions = new DimensionsImpl(clientObject.getDimensions(), this);
        }
        return dimensions;
    }

    /** @return Resource collection API of Queries. */
    public Queries queries() {
        if (this.queries == null) {
            this.queries = new QueriesImpl(clientObject.getQueries(), this);
        }
        return queries;
    }

    /** @return Resource collection API of GenerateReservationDetailsReports. */
    public GenerateReservationDetailsReports generateReservationDetailsReports() {
        if (this.generateReservationDetailsReports == null) {
            this.generateReservationDetailsReports =
                new GenerateReservationDetailsReportsImpl(clientObject.getGenerateReservationDetailsReports(), this);
        }
        return generateReservationDetailsReports;
    }

    /** @return Resource collection API of Operations. */
    public Operations operations() {
        if (this.operations == null) {
            this.operations = new OperationsImpl(clientObject.getOperations(), this);
        }
        return operations;
    }

    /** @return Resource collection API of Exports. */
    public Exports exports() {
        if (this.exports == null) {
            this.exports = new ExportsImpl(clientObject.getExports(), this);
        }
        return exports;
    }

    /**
     * @return Wrapped service client CostManagementClient providing direct access to the underlying auto-generated API
     *     implementation, based on Azure REST API.
     */
    public CostManagementClient serviceClient() {
        return this.clientObject;
    }
}
| |
/**
* com.planet_ink.coffee_mud.core.intermud.i3.net.Interactive
* Copyright (c) 1996 George Reese
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* The Imaginary interactive implementation
* of interactive connections.
*/
package com.planet_ink.coffee_mud.core.intermud.i3.net;
import com.planet_ink.coffee_mud.core.intermud.i3.server.I3Server;
import com.planet_ink.coffee_mud.core.intermud.i3.server.ServerUser;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import java.io.*;
import java.net.Socket;
import java.util.Date;
import java.util.List;
import java.util.Vector;
/**
* This class provides an implementation of the Imaginary server
* interactive module. It is responsible for handling the login
* of an individual user and processing its input as directed
* by the server.
* Created: 27 September 1996
* Last modified: 27 September 1996
* @author George Reese (borg@imaginary.com)
* @version 1.0
*/
public abstract class Interactive implements ServerUser {
	/**
	 * Given a user name, builds a unique lookup key.  The key has nothing
	 * to do with the unique object id; its purpose is to make sure you do
	 * not end up with both a user named Descartes and a user named
	 * Des Cartes.  The name is lower-cased, the separator characters
	 * apostrophe, hyphen and space are dropped, and any other
	 * non-alphabetic character is rejected.
	 * @param nom the visual name to create a key from
	 * @return the new key
	 * @throws InvalidNameException if the name contains an illegal
	 *         character, or the resulting key is shorter than 3 or longer
	 *         than 29 characters
	 */
	static public String createKeyName(String nom) throws InvalidNameException
	{
		final String lowered = nom.toLowerCase();
		// build the key in a StringBuilder; the old "key = key + c" loop was O(n^2)
		final StringBuilder key = new StringBuilder(lowered.length());
		for(int i=0; i<lowered.length(); i++)
		{
			final char c = lowered.charAt(i);
			if( c >= 'a' && c <= 'z' )
			{
				key.append(c);
			}
			else if( c != '\'' && c != '-' && c != ' ' )
			{
				throw new InvalidNameException(c + " is an invalid character for names.");
			}
		}
		if( key.length() < 3 )
		{
			throw new InvalidNameException("Your name must have at least three alphabetic characters.");
		}
		else if( key.length() > 29 )
		{
			throw new InvalidNameException("Your name is too long.");
		}
		return key.toString();
	}

	/**
	 * Finds the Interactive instance for a user name among the users
	 * currently logged into the server.
	 * @param nom the name of the desired user
	 * @return the Interactive for that name, or null if no such user is
	 *         logged in (or the name itself cannot form a valid key)
	 */
	static public Interactive findUser(String nom)
	{
		final String key;
		try
		{
			key = createKeyName(nom);
		}
		catch( final InvalidNameException e )
		{
			// a name that cannot form a key can never match a logged-in user
			return null;
		}
		for(final ServerUser u : I3Server.getInteractives())
		{
			final Interactive user = (Interactive)u;
			if( user.getKeyName().equals(key) )
			{
				return user;
			}
		}
		return null;
	}

	private InteractiveBody body;           // in-game body, null until setBody()
	private Date current_login_time;        // set by connect()
	private boolean destructed;             // true once destruct() has run
	private String display_name;            // mixed-case name as shown to others
	private String email;
	private InputThread input_thread;       // async reader; null until setSocket()
	private String key_name;                // canonical key, see createKeyName(); write-once
	private Date last_command_time;         // updated on every processed command
	private String last_login_site;         // write-once
	private Date last_login_time;           // write-once
	private String object_id;               // write-once, assigned by the server
	private PrintStream output_stream;      // null until setSocket()
	private String password;
	private String real_name;
	private final Vector<Input> redirect;   // queued Input objects that intercept the next commands
	private Socket socket;

	/**
	 * Constructs a new interactive object and initializes its data.
	 */
	public Interactive()
	{
		super();
		destructed = false;
		input_thread = null;
		object_id = null;
		output_stream = null;
		redirect = new Vector<Input>();
	}

	/**
	 * Implementation of the ServerUser connect method; records the login
	 * and last-command times.  A mudlib will want to display a welcome
	 * screen and ask for a user name by extending this method.
	 */
	@Override
	public synchronized void connect()
	{
		current_login_time = new Date();
		last_command_time = new Date();
	}

	/**
	 * Stops any running I/O threads for this interactive, closes the user
	 * socket, and marks the object for destruction according to the
	 * requirements of the ServerObject interface.
	 * @see com.planet_ink.coffee_mud.core.intermud.i3.server.ServerObject#getDestructed
	 */
	@Override
	public synchronized void destruct()
	{
		// output_stream stays null until setSocket() runs; guard so a
		// destruct on a half-initialized (or link-lost) user cannot NPE.
		if( output_stream != null )
		{
			output_stream.flush();
		}
		if( input_thread != null )
		{
			input_thread.stop();
			input_thread = null;
		}
		try
		{
			if(socket!=null)
				socket.close();
		}
		catch( final java.io.IOException e )
		{
			Log.errOut("IMInteractive",e);
		}
		destructed = true;
	}

	/**
	 * Called whenever a command is pulled off the incoming command stack.
	 * If an Input redirection is pending, the command is sent there;
	 * otherwise it goes to the body's command parser.  When no further
	 * redirection is pending, the prompt is re-sent.
	 * @param cmd the command to be executed
	 * @see #processInput
	 */
	protected synchronized void input(String cmd)
	{
		Input ob = null;
		if( redirect.size() > 0 )
		{
			// pop the oldest pending redirection target
			ob = redirect.remove(0);
		}
		if( ob != null )
		{
			ob.input(this, cmd);
		}
		else if( body != null )
		{
			body.executeCommand(cmd);
		}
		if( redirect.size() < 1 )
		{
			sendMessage(getPrompt(), true);
		}
	}

	/**
	 * Triggered by the input thread when it detects that the user has
	 * lost their link.  Notifies the body (if any), then destructs.
	 */
	protected void loseLink()
	{
		socket = null;
		if( body != null )
		{
			body.loseLink();
		}
		destruct();
	}

	/**
	 * Event-handling hook invoked by the server each cycle; no periodic
	 * events are processed at this level.
	 * @see com.planet_ink.coffee_mud.core.intermud.i3.server.ServerObject#processEvent
	 */
	@Override
	public void processEvent()
	{
	}

	/**
	 * Called once each server cycle: if a command is waiting on the input
	 * queue, updates the last-command time and dispatches it via input().
	 * @see #input
	 * @see com.planet_ink.coffee_mud.core.intermud.i3.server.ServerUser#processInput
	 */
	@Override
	public synchronized final void processInput()
	{
		if( input_thread != null )
		{
			final String msg = input_thread.nextMessage();
			if( msg != null )
			{
				last_command_time = new java.util.Date();
				input(msg);
			}
		}
	}

	/**
	 * Redirects user input to the given Input object.  Targets are queued,
	 * so nested redirections (e.g. help called from inside an editor) are
	 * consumed one command at a time before control returns to the parser.
	 * @param ob the Input instance to which input will be redirected
	 * @see com.planet_ink.coffee_mud.core.intermud.i3.net.Input
	 * @see #input
	 */
	public synchronized final void redirectInput(Input ob)
	{
		redirect.add(ob);
	}

	/**
	 * Sends a message across to the client with a newline appended.
	 * Does nothing when the user has no live socket.
	 * @param msg the message to send to the client machine
	 */
	public final void sendMessage(String msg)
	{
		if( socket == null )
		{
			return;
		}
		sendMessage(msg, false);
	}

	/**
	 * Sends a message across to the client.  Unless nowrap is true, a
	 * newline is appended.  Does nothing before setSocket() has supplied
	 * an output stream.
	 * @param msg the message to send to the client
	 * @param nowrap if true, no newline is attached
	 */
	public final void sendMessage(String msg, boolean nowrap)
	{
		if( output_stream == null )
		{
			// no connection established yet (or already torn down)
			return;
		}
		if( !nowrap )
		{
			msg += "\n";
		}
		output_stream.print(msg);
		output_stream.flush();
	}

	/**
	 * Validates a candidate password against the stored one.
	 * NOTE(review): plain String.equals is not a constant-time compare and
	 * the password is held in the clear -- confirm whether hashing plus
	 * MessageDigest.isEqual is warranted here.
	 * @param other the password to check
	 * @return true if the two passwords match
	 */
	public final boolean validatePassword(String other)
	{
		return other.equals(password);
	}

	/**
	 * Provides the address from which this user is connected, preferring
	 * the configured MUD domain name when one is set.
	 * @return the host name for this user's current site
	 */
	public final String getAddressName()
	{
		if(CMProps.getVar(CMProps.Str.MUDDOMAIN).length()>0)
			return CMProps.getVar(CMProps.Str.MUDDOMAIN).toLowerCase();
		return socket.getInetAddress().getHostName();
	}

	/**
	 * Provides the body to which this user is connected.
	 * @return the connected body, or null if no body exists
	 */
	public final InteractiveBody getBody()
	{
		return body;
	}

	/**
	 * Sets the body to which this interactive connection is connected.
	 * Any mudlib using this system must implement InteractiveBody for any
	 * body to be used by a user.
	 * @param ob the body being connected to this interactive
	 * @see com.planet_ink.coffee_mud.core.intermud.i3.net.InteractiveBody
	 */
	public void setBody(InteractiveBody ob)
	{
		body = ob;
	}

	/**
	 * Provides the time at which the user logged in for this session.
	 * @return the time of login for the current session
	 */
	public final Date getCurrentLoginTime()
	{
		return current_login_time;
	}

	/**
	 * Tells whether or not the user is marked for destruction.
	 * @return true if the user is marked for destruction
	 */
	@Override
	public boolean getDestructed()
	{
		return destructed;
	}

	/**
	 * Provides the user's name as they wish it to appear, with mixed
	 * capitalization, spaces, hyphens, etc.
	 * @return the user's display name
	 */
	public String getDisplayName()
	{
		return display_name;
	}

	/**
	 * Sets the user's display name.  The change is silently refused when
	 * the new name does not reduce to this user's key name.
	 * @param str the new display name
	 */
	public final void setDisplayName(String str)
	{
		try
		{
			if( !getKeyName().equals(Interactive.createKeyName(str)) )
			{
				return;
			}
			display_name = str;
		}
		catch( final InvalidNameException e )
		{
			// an invalid name simply leaves the display name unchanged
			return;
		}
	}

	/**
	 * Provides the user's email address.
	 * @return the email address for this user
	 */
	public final String getEmail()
	{
		return email;
	}

	/**
	 * Sets the user's email address.
	 * @param str the new email address
	 */
	public final void setEmail(String str)
	{
		email = str;
	}

	/**
	 * Provides the number of seconds elapsed since the user last entered
	 * a command.  Assumes connect() has run (last_command_time set).
	 * @return the idle time in seconds
	 */
	public final int getIdle()
	{
		return (int)(((new Date()).getTime() - last_command_time.getTime())/1000);
	}

	/**
	 * Provides the key name for this user: a reduction of the user name
	 * to a unique identifier, so that "descartes", "deScartes" and
	 * "des cartes" all resolve to the same user.  The key name is the
	 * common denominator to which a name is reduced for comparison.
	 * @see #createKeyName
	 * @return the key name
	 */
	public final String getKeyName()
	{
		return key_name;
	}

	/**
	 * Sets the key name during user creation.  Write-once: later calls
	 * are ignored.
	 * @param str the key name being set
	 * @see #getKeyName
	 */
	protected void setKeyName(String str)
	{
		if( key_name != null )
		{
			return;
		}
		key_name = str;
	}

	/**
	 * Provides the name of the site from which the user logged in at
	 * their last login.
	 * @return the last login site
	 */
	public final String getLastLoginSite()
	{
		return last_login_site;
	}

	/**
	 * Sets the last login site.  Write-once: used by a subclass during
	 * login; later calls are ignored.
	 * @param site the last login site
	 */
	public void setLastLoginSite(String site)
	{
		if( last_login_site != null )
		{
			return;
		}
		last_login_site = site;
	}

	/**
	 * Provides the time of the user's last login.
	 * @return the last login time
	 */
	public final Date getLastLoginTime()
	{
		return last_login_time;
	}

	/**
	 * Used by the login process to set the last login time.  Write-once:
	 * later calls are ignored.
	 * @param time the time the user last logged in
	 */
	public void setLastLoginTime(Date time)
	{
		if( last_login_time != null )
		{
			return;
		}
		last_login_time = time;
	}

	/**
	 * Gives the user object's object id.
	 * @return the object id
	 * @see com.planet_ink.coffee_mud.core.intermud.i3.server.ServerObject#getObjectId
	 */
	@Override
	public final String getObjectId()
	{
		return object_id;
	}

	/**
	 * Allows the server to set the object id.  Write-once: later calls
	 * are ignored.
	 * @param id the object id assigned to this object
	 * @see com.planet_ink.coffee_mud.core.intermud.i3.server.ServerObject#setObjectId
	 */
	@Override
	public final void setObjectId(String id)
	{
		if( object_id != null )
		{
			return;
		}
		object_id = id;
	}

	/**
	 * Allows a subclass to get the password.
	 * @return the user's password
	 */
	protected String getPassword()
	{
		return password;
	}

	/**
	 * Sets the user's password.
	 * @param pass the new password
	 */
	protected void setPassword(String pass)
	{
		password = pass;
	}

	/**
	 * Provides the user's command prompt.
	 * @return the command prompt
	 */
	public String getPrompt()
	{
		return "> ";
	}

	/**
	 * Provides the user's real name, or null if they never entered one.
	 * @return the user's real name or null
	 */
	public final String getRealName()
	{
		return real_name;
	}

	/**
	 * Sets the user's real name.
	 * @param nom the real name for the user
	 */
	public void setRealName(String nom)
	{
		real_name = nom;
	}

	/**
	 * Called by the server before connect() to assign the socket for this
	 * Interactive; spawns the input thread and opens the output stream.
	 * @param s the socket for this connection
	 * @throws java.io.IOException if the socket streams cannot be opened
	 * @see com.planet_ink.coffee_mud.core.intermud.i3.server.ServerUser#setSocket
	 */
	@Override
	public final void setSocket(Socket s) throws java.io.IOException
	{
		socket = s;
		input_thread = new InputThread(socket, this);
		output_stream = new PrintStream(s.getOutputStream());
	}
}
/**
* The InputThread class handles asynchronous user input and queues
* it up to be picked up by the user synchronously. In English,
* the user can be entering information at any point in time
* while the server is running. You want, however, that a command
* be executed in a specific order. This class therefore stuffs commands
* into a queue when they arrive. When the user is ready, it pulls a
* single command off to be executed.
* Created: 27 September 1996
* Last modified 27 September 1996
* @author George Reese (borg@imaginary.com)
* @version 1.0
* @see com.planet_ink.coffee_mud.core.intermud.i3.net.Interactive
*/
class InputThread implements Runnable
{
	private final List<String> input_buffer;    // queued lines, oldest first; guarded by 'this'
	private final BufferedReader stream;        // reader over the client socket
	private volatile boolean destructed;        // volatile: written by stop() thread, read by run()
	private final Thread thread;                // daemon reader thread
	private final Interactive user;             // the user this thread feeds
	private volatile long internalSize=0;       // approximate buffered size (chars * 2 bytes)

	/**
	 * Constructs and starts the daemon thread which accepts user input.
	 * As a user enters a command, the command is added to the buffer;
	 * each server cycle the Interactive pulls off one command.
	 * @param s the socket connected to the user's machine
	 * @param u the Interactive attached to this thread
	 * @throws java.io.IOException if no input stream can be created
	 */
	public InputThread(Socket s, Interactive u) throws java.io.IOException {
		destructed = false;
		user = u;
		// typed Vector removes the raw-type/unchecked warnings the old code suppressed
		input_buffer = new Vector<String>(10);
		stream = new java.io.BufferedReader(new java.io.InputStreamReader(s.getInputStream()));
		thread = new Thread(Thread.currentThread().getThreadGroup(),this,"I3_"+Thread.currentThread().getName());
		thread.setDaemon(true);
		thread.start();
	}

	/**
	 * Accepts input from the user machine as long as the user is
	 * connected.  If the user drops link (read error), this calls
	 * loseLink() on the interactive object and exits.
	 * @see com.planet_ink.coffee_mud.core.intermud.i3.net.Interactive#loseLink
	 */
	@Override
	public void run()
	{
		while( !destructed )
		{
			final String msg;
			try
			{
				msg = stream.readLine();
			}
			catch( final java.io.IOException e )
			{
				synchronized( user )
				{
					user.loseLink();
				}
				return;
			}
			// NOTE(review): readLine() returns null at end-of-stream; the loop
			// then keeps polling until stop() is called -- confirm whether a
			// clean client disconnect should trigger loseLink() here instead.
			synchronized( this )
			{
				if(msg != null)
				{
					input_buffer.add(msg);
					internalSize+=(msg.length()*2);
				}
			}
			if(internalSize > (10 * 1024 * 1024))
			{
				Log.errOut("Excessive buffer size: "+internalSize);
			}
			try { Thread.sleep(10); }
			catch( final InterruptedException e )
			{
				// restore the interrupt flag; the destructed check will exit the loop
				Thread.currentThread().interrupt();
			}
		}
	}

	/**
	 * Called by the owning Interactive when it is destructed: stops the
	 * reader thread and discards any queued input.
	 */
	public void stop()
	{
		destructed = true;
		CMLib.killThread(thread,500,1);
		input_buffer.clear();
	}

	/**
	 * Pops the oldest queued command, or returns null when none is
	 * waiting.  Method-level synchronization matches the producer side in
	 * run(); the old extra lock on input_buffer itself was redundant.
	 * @return the next queued line, or null
	 */
	protected synchronized String nextMessage()
	{
		final String msg;
		if( input_buffer.size() > 0 )
		{
			msg = input_buffer.remove(0);
			internalSize-=(msg.length()*2);
		}
		else
		{
			msg = null;
		}
		return msg;
	}
}
| |
/*
* Copyright 2013 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.executor.impl.wih;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import org.jbpm.bpmn2.handler.ServiceTaskHandler;
import org.jbpm.executor.ExecutorServiceFactory;
import org.jbpm.process.core.async.AsyncSignalEventCommand;
import org.jbpm.process.instance.impl.demo.SystemOutWorkItemHandler;
import org.jbpm.runtime.manager.impl.DefaultRegisterableItemsFactory;
import org.jbpm.services.task.identity.JBossUserGroupCallbackImpl;
import org.jbpm.test.util.AbstractExecutorBaseTest;
import org.jbpm.test.util.CountDownProcessEventListener;
import org.jbpm.test.util.ExecutorTestUtil;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.kie.api.event.process.ProcessEventListener;
import org.kie.api.executor.ExecutorService;
import org.kie.api.executor.RequestInfo;
import org.kie.api.io.ResourceType;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.manager.RuntimeEngine;
import org.kie.api.runtime.manager.RuntimeEnvironment;
import org.kie.api.runtime.manager.RuntimeEnvironmentBuilder;
import org.kie.api.runtime.manager.RuntimeManager;
import org.kie.api.runtime.manager.RuntimeManagerFactory;
import org.kie.api.runtime.manager.audit.NodeInstanceLog;
import org.kie.api.runtime.process.ProcessInstance;
import org.kie.api.runtime.process.WorkItemHandler;
import org.kie.api.runtime.query.QueryContext;
import org.kie.api.task.UserGroupCallback;
import org.kie.api.task.model.TaskSummary;
import org.kie.internal.io.ResourceFactory;
import org.kie.internal.runtime.manager.RuntimeManagerRegistry;
import org.kie.internal.runtime.manager.context.EmptyContext;
import bitronix.tm.resource.jdbc.PoolingDataSource;
public class AsyncContinuationSupportTest extends AbstractExecutorBaseTest {
    // Bitronix pooling data source backing the JPA persistence unit; created in setup(), closed in teardown()
    private PoolingDataSource pds;
    // resolves users/groups for human tasks ("mary"/"john" in group "HR")
    private UserGroupCallback userGroupCallback;
    // runtime manager under test; deregistered and closed in teardown() when a test created one
    private RuntimeManager manager;
    // jBPM executor that runs the async continuations; destroyed in teardown()
    private ExecutorService executorService;
    // entity manager factory, only set by tests that need one directly
    private EntityManagerFactory emf = null;
    // NOTE(review): not referenced in the visible tests -- presumably a wait interval used elsewhere in this class
    private long delay = 1000;
@Before
public void setup() {
ExecutorTestUtil.cleanupSingletonSessionId();
pds = ExecutorTestUtil.setupPoolingDataSource();
Properties properties= new Properties();
properties.setProperty("mary", "HR");
properties.setProperty("john", "HR");
userGroupCallback = new JBossUserGroupCallbackImpl(properties);
executorService = buildExecutorService();
}
    @After
    public void teardown() {
        // shut-down order matters: stop the executor first so no async job
        // runs against resources being closed below
        executorService.destroy();
        if (manager != null) {
            // deregister before closing so the registry holds no stale entry
            RuntimeManagerRegistry.get().remove(manager.getIdentifier());
            manager.close();
        }
        if (emf != null) {
            emf.close();
        }
        // finally release the underlying data source
        pds.close();
    }
@Test(timeout=10000)
public void testAsyncScriptTask() throws Exception {
final CountDownProcessEventListener countDownListener = new CountDownProcessEventListener("Hello", 1);
RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get().newDefaultBuilder()
.userGroupCallback(userGroupCallback)
.addAsset(ResourceFactory.newClassPathResource("BPMN2-AsyncScriptTask.bpmn2"), ResourceType.BPMN2)
.addEnvironmentEntry("ExecutorService", executorService)
.registerableItemsFactory(new DefaultRegisterableItemsFactory() {
@Override
public Map<String, WorkItemHandler> getWorkItemHandlers(RuntimeEngine runtime) {
Map<String, WorkItemHandler> handlers = super.getWorkItemHandlers(runtime);
handlers.put("async", new SystemOutWorkItemHandler());
return handlers;
}
@Override
public List<ProcessEventListener> getProcessEventListeners( RuntimeEngine runtime) {
List<ProcessEventListener> listeners = super.getProcessEventListeners(runtime);
listeners.add(countDownListener);
return listeners;
}
})
.get();
manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment);
assertNotNull(manager);
RuntimeEngine runtime = manager.getRuntimeEngine(EmptyContext.get());
KieSession ksession = runtime.getKieSession();
assertNotNull(ksession);
ProcessInstance processInstance = ksession.startProcess("AsyncScriptTask");
assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
long processInstanceId = processInstance.getId();
// make sure that waiting for event process is not finished yet as it must be through executor/async
processInstance = runtime.getKieSession().getProcessInstance(processInstanceId);
assertNotNull(processInstance);
countDownListener.waitTillCompleted();
processInstance = runtime.getKieSession().getProcessInstance(processInstanceId);
assertNull(processInstance);
List<? extends NodeInstanceLog> logs = runtime.getAuditService().findNodeInstances(processInstanceId);
assertNotNull(logs);
assertEquals(8, logs.size());
}
@Test
public void testNoAsyncServiceAvailableScriptTask() throws Exception {
RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get().newDefaultBuilder()
.userGroupCallback(userGroupCallback)
.addAsset(ResourceFactory.newClassPathResource("BPMN2-AsyncScriptTask.bpmn2"), ResourceType.BPMN2)
.registerableItemsFactory(new DefaultRegisterableItemsFactory() {
@Override
public Map<String, WorkItemHandler> getWorkItemHandlers(RuntimeEngine runtime) {
Map<String, WorkItemHandler> handlers = super.getWorkItemHandlers(runtime);
handlers.put("async", new SystemOutWorkItemHandler());
return handlers;
}
})
.get();
manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment);
assertNotNull(manager);
RuntimeEngine runtime = manager.getRuntimeEngine(EmptyContext.get());
KieSession ksession = runtime.getKieSession();
assertNotNull(ksession);
ProcessInstance processInstance = ksession.startProcess("AsyncScriptTask");
long processInstanceId = processInstance.getId();
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNull(processInstance);
List<? extends NodeInstanceLog> logs = runtime.getAuditService().findNodeInstances(processInstanceId);
assertNotNull(logs);
assertEquals(8, logs.size());
}
@Test(timeout=10000)
public void testAsyncServiceTask() throws Exception {
final CountDownProcessEventListener countDownListener = new CountDownProcessEventListener("Hello", 1);
RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get().newDefaultBuilder()
.userGroupCallback(userGroupCallback)
.addAsset(ResourceFactory.newClassPathResource("BPMN2-AsyncServiceProcess.bpmn2"), ResourceType.BPMN2)
.addEnvironmentEntry("ExecutorService", executorService)
.registerableItemsFactory(new DefaultRegisterableItemsFactory() {
@Override
public Map<String, WorkItemHandler> getWorkItemHandlers(RuntimeEngine runtime) {
Map<String, WorkItemHandler> handlers = super.getWorkItemHandlers(runtime);
handlers.put("async", new SystemOutWorkItemHandler());
handlers.put("Service Task", new ServiceTaskHandler());
return handlers;
}
@Override
public List<ProcessEventListener> getProcessEventListeners( RuntimeEngine runtime) {
List<ProcessEventListener> listeners = super.getProcessEventListeners(runtime);
listeners.add(countDownListener);
return listeners;
}
})
.get();
manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment);
assertNotNull(manager);
RuntimeEngine runtime = manager.getRuntimeEngine(EmptyContext.get());
KieSession ksession = runtime.getKieSession();
assertNotNull(ksession);
Map<String, Object> params = new HashMap<String, Object>();
params.put("s", "john");
ProcessInstance processInstance = ksession.startProcess("AsyncServiceProcess", params);
assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
long processInstanceId = processInstance.getId();
// make sure that waiting for event process is not finished yet as it must be through executor/async
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNotNull(processInstance);
countDownListener.waitTillCompleted();
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNull(processInstance);
List<? extends NodeInstanceLog> logs = runtime.getAuditService().findNodeInstances(processInstanceId);
assertNotNull(logs);
assertEquals(6, logs.size());
}
@Test(timeout=10000)
public void testAsyncMIUserTask() throws Exception {
final CountDownProcessEventListener countDownListener = new CountDownProcessEventListener("Hello", 1, true);
RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get().newDefaultBuilder()
.userGroupCallback(userGroupCallback)
.addAsset(ResourceFactory.newClassPathResource("BPMN2-MultiInstanceLoopCharacteristicsTask.bpmn2"), ResourceType.BPMN2)
.addEnvironmentEntry("ExecutorService", executorService)
.registerableItemsFactory(new DefaultRegisterableItemsFactory() {
@Override
public Map<String, WorkItemHandler> getWorkItemHandlers(RuntimeEngine runtime) {
Map<String, WorkItemHandler> handlers = super.getWorkItemHandlers(runtime);
handlers.put("async", new SystemOutWorkItemHandler());
return handlers;
}
@Override
public List<ProcessEventListener> getProcessEventListeners( RuntimeEngine runtime) {
List<ProcessEventListener> listeners = super.getProcessEventListeners(runtime);
listeners.add(countDownListener);
return listeners;
}
})
.get();
manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment);
assertNotNull(manager);
RuntimeEngine runtime = manager.getRuntimeEngine(EmptyContext.get());
KieSession ksession = runtime.getKieSession();
assertNotNull(ksession);
ArrayList<String> items = new ArrayList<String>();
items.add("one");
items.add("two");
items.add("three");
Map<String, Object> params = new HashMap<String, Object>();
params.put("list", items);
ProcessInstance processInstance = ksession.startProcess("MultiInstanceLoopCharacteristicsTask", params);
assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
long processInstanceId = processInstance.getId();
// make sure that waiting for event process is not finished yet as it must be through executor/async
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNotNull(processInstance);
countDownListener.waitTillCompleted();
List<TaskSummary> tasks = runtime.getTaskService().getTasksAssignedAsPotentialOwner("john", "en-UK");
assertNotNull(tasks);
assertEquals(1, tasks.size());
countDownListener.reset(1);
countDownListener.waitTillCompleted();
tasks = runtime.getTaskService().getTasksAssignedAsPotentialOwner("john", "en-UK");
assertNotNull(tasks);
assertEquals(2, tasks.size());
countDownListener.reset(1);
countDownListener.waitTillCompleted();
tasks = runtime.getTaskService().getTasksAssignedAsPotentialOwner("john", "en-UK");
assertNotNull(tasks);
assertEquals(3, tasks.size());
for (TaskSummary task : tasks) {
runtime.getTaskService().start(task.getId(), "john");
runtime.getTaskService().complete(task.getId(), "john", null);
}
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNull(processInstance);
List<? extends NodeInstanceLog> logs = runtime.getAuditService().findNodeInstances(processInstanceId);
assertNotNull(logs);
assertEquals(12, logs.size());
}
@Test(timeout=10000)
public void testAsyncMISubProcess() throws Exception {
final CountDownProcessEventListener countDownListener = new CountDownProcessEventListener("Hello", 1);
RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get().newDefaultBuilder()
.userGroupCallback(userGroupCallback)
.addAsset(ResourceFactory.newClassPathResource("BPMN2-MultiInstanceLoopCharacteristicsProcess.bpmn2"), ResourceType.BPMN2)
.addEnvironmentEntry("ExecutorService", executorService)
.registerableItemsFactory(new DefaultRegisterableItemsFactory() {
@Override
public Map<String, WorkItemHandler> getWorkItemHandlers(RuntimeEngine runtime) {
Map<String, WorkItemHandler> handlers = super.getWorkItemHandlers(runtime);
handlers.put("async", new SystemOutWorkItemHandler());
return handlers;
}
@Override
public List<ProcessEventListener> getProcessEventListeners( RuntimeEngine runtime) {
List<ProcessEventListener> listeners = super.getProcessEventListeners(runtime);
listeners.add(countDownListener);
return listeners;
}
})
.get();
manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment);
assertNotNull(manager);
RuntimeEngine runtime = manager.getRuntimeEngine(EmptyContext.get());
KieSession ksession = runtime.getKieSession();
assertNotNull(ksession);
ArrayList<String> items = new ArrayList<String>();
items.add("one");
items.add("two");
items.add("three");
Map<String, Object> params = new HashMap<String, Object>();
params.put("list", items);
ProcessInstance processInstance = ksession.startProcess("MultiInstanceLoopCharacteristicsProcess", params);
assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
long processInstanceId = processInstance.getId();
// make sure that waiting for event process is not finished yet as it must be through executor/async
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNotNull(processInstance);
countDownListener.waitTillCompleted();
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNotNull(processInstance);
countDownListener.reset(1);
countDownListener.waitTillCompleted();
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNotNull(processInstance);
countDownListener.reset(1);
countDownListener.waitTillCompleted();
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNull(processInstance);
List<? extends NodeInstanceLog> logs = runtime.getAuditService().findNodeInstances(processInstanceId);
assertNotNull(logs);
assertEquals(26, logs.size());
}
/**
 * BPMN2-AsyncSubProcess: the sub process is executed asynchronously via the
 * jBPM executor. Verifies the instance is still active immediately after
 * start (async work not yet done) and completes once the "Hello" node has
 * been triggered in the background; finally checks the audit log size.
 */
@Test(timeout=10000)
public void testAsyncSubProcess() throws Exception {
// counts down when the node named "Hello" is triggered
final CountDownProcessEventListener countDownListener = new CountDownProcessEventListener("Hello", 1);
// environment wired with the shared executor service; the listener is registered on every session
RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get().newDefaultBuilder()
.userGroupCallback(userGroupCallback)
.addAsset(ResourceFactory.newClassPathResource("BPMN2-AsyncSubProcess.bpmn2"), ResourceType.BPMN2)
.addEnvironmentEntry("ExecutorService", executorService)
.registerableItemsFactory(new DefaultRegisterableItemsFactory() {
@Override
public Map<String, WorkItemHandler> getWorkItemHandlers(RuntimeEngine runtime) {
Map<String, WorkItemHandler> handlers = super.getWorkItemHandlers(runtime);
// handler for "async" work items used by the process definition
handlers.put("async", new SystemOutWorkItemHandler());
return handlers;
}
@Override
public List<ProcessEventListener> getProcessEventListeners( RuntimeEngine runtime) {
List<ProcessEventListener> listeners = super.getProcessEventListeners(runtime);
listeners.add(countDownListener);
return listeners;
}
})
.get();
manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment);
assertNotNull(manager);
RuntimeEngine runtime = manager.getRuntimeEngine(EmptyContext.get());
KieSession ksession = runtime.getKieSession();
assertNotNull(ksession);
Map<String, Object> params = new HashMap<String, Object>();
ProcessInstance processInstance = ksession.startProcess("AsyncSubProcess", params);
assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
long processInstanceId = processInstance.getId();
// make sure that waiting for event process is not finished yet as it must be through executor/async
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNotNull(processInstance);
// block until the executor has run the async sub process; instance must then be finished
countDownListener.waitTillCompleted();
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNull(processInstance);
// audit trail sanity check on the number of node instance log entries
List<? extends NodeInstanceLog> logs = runtime.getAuditService().findNodeInstances(processInstanceId);
assertNotNull(logs);
assertEquals(18, logs.size());
}
/**
 * BPMN2-SubProcessAsyncNodes: every node inside the sub process is async.
 * Steps through the nodes "Hello1" -> "Hello2" -> "Hello3" one executor job
 * at a time, asserting the instance stays active between steps, then waits
 * for final completion and checks the audit log size.
 */
@Test(timeout=10000)
public void testSubProcessWithAsyncNodes() throws Exception {
// first wait target: node "Hello1"; re-armed below for Hello2/Hello3
final CountDownProcessEventListener countDownListener = new CountDownProcessEventListener("Hello1", 1);
RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get().newDefaultBuilder()
.userGroupCallback(userGroupCallback)
.addAsset(ResourceFactory.newClassPathResource("BPMN2-SubProcessAsyncNodes.bpmn2"), ResourceType.BPMN2)
.addEnvironmentEntry("ExecutorService", executorService)
.registerableItemsFactory(new DefaultRegisterableItemsFactory() {
@Override
public Map<String, WorkItemHandler> getWorkItemHandlers(RuntimeEngine runtime) {
Map<String, WorkItemHandler> handlers = super.getWorkItemHandlers(runtime);
handlers.put("async", new SystemOutWorkItemHandler());
return handlers;
}
@Override
public List<ProcessEventListener> getProcessEventListeners( RuntimeEngine runtime) {
List<ProcessEventListener> listeners = super.getProcessEventListeners(runtime);
listeners.add(countDownListener);
return listeners;
}
})
.get();
manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment);
assertNotNull(manager);
RuntimeEngine runtime = manager.getRuntimeEngine(EmptyContext.get());
KieSession ksession = runtime.getKieSession();
assertNotNull(ksession);
Map<String, Object> params = new HashMap<String, Object>();
ProcessInstance processInstance = ksession.startProcess("SubProcess", params);
assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
long processInstanceId = processInstance.getId();
// make sure that waiting for event process is not finished yet as it must be through executor/async
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNotNull(processInstance);
// wait for Hello1, then re-arm the latch for each subsequent async node
countDownListener.waitTillCompleted();
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNotNull(processInstance);
countDownListener.reset("Hello2", 1);
countDownListener.waitTillCompleted();
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNotNull(processInstance);
countDownListener.reset("Hello3", 1);
countDownListener.waitTillCompleted();
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNotNull(processInstance);
// NOTE(review): sleep-based wait for final completion is timing-sensitive and
// potentially flaky; a latch on the end node would be more robust — TODO confirm
// the end node name in BPMN2-SubProcessAsyncNodes.bpmn2 and replace the sleep
Thread.sleep(delay);
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNull(processInstance);
List<? extends NodeInstanceLog> logs = runtime.getAuditService().findNodeInstances(processInstanceId);
assertNotNull(logs);
assertEquals(18, logs.size());
}
/**
 * BPMN2-SubProcessSomeAsyncNodes: only some nodes in the sub process are
 * async. Waits for the async "Hello2" node, then for "Goodbye", after which
 * the instance must be finished; checks the audit log size.
 */
@Test(timeout=10000)
public void testSubProcessWithSomeAsyncNodes() throws Exception {
// first wait target: node "Hello2"; re-armed for "Goodbye" below
final CountDownProcessEventListener countDownListener = new CountDownProcessEventListener("Hello2", 1);
RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get().newDefaultBuilder()
.userGroupCallback(userGroupCallback)
.addAsset(ResourceFactory.newClassPathResource("BPMN2-SubProcessSomeAsyncNodes.bpmn2"), ResourceType.BPMN2)
.addEnvironmentEntry("ExecutorService", executorService)
.registerableItemsFactory(new DefaultRegisterableItemsFactory() {
@Override
public Map<String, WorkItemHandler> getWorkItemHandlers(RuntimeEngine runtime) {
Map<String, WorkItemHandler> handlers = super.getWorkItemHandlers(runtime);
handlers.put("async", new SystemOutWorkItemHandler());
return handlers;
}
@Override
public List<ProcessEventListener> getProcessEventListeners( RuntimeEngine runtime) {
List<ProcessEventListener> listeners = super.getProcessEventListeners(runtime);
listeners.add(countDownListener);
return listeners;
}
})
.get();
manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment);
assertNotNull(manager);
RuntimeEngine runtime = manager.getRuntimeEngine(EmptyContext.get());
KieSession ksession = runtime.getKieSession();
assertNotNull(ksession);
Map<String, Object> params = new HashMap<String, Object>();
ProcessInstance processInstance = ksession.startProcess("SubProcess", params);
assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
long processInstanceId = processInstance.getId();
// make sure that waiting for event process is not finished yet as it must be through executor/async
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNotNull(processInstance);
countDownListener.waitTillCompleted();
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNotNull(processInstance);
// after "Goodbye" fires, the process should run to completion
countDownListener.reset("Goodbye", 1);
countDownListener.waitTillCompleted();
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNull(processInstance);
List<? extends NodeInstanceLog> logs = runtime.getAuditService().findNodeInstances(processInstanceId);
assertNotNull(logs);
assertEquals(18, logs.size());
}
/**
 * Async call activity: "ParentProcess" invokes a child process via a call
 * activity executed through the executor. The parent must still be active
 * right after start and be gone once the "CallActivity" node has triggered.
 */
@Test(timeout=10000)
public void testAsyncCallActivityTask() throws Exception {
// counts down when the "CallActivity" node is triggered
final CountDownProcessEventListener countDownListener = new CountDownProcessEventListener("CallActivity", 1);
// both parent and child definitions must be deployed for the call activity to resolve
RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get().newDefaultBuilder()
.userGroupCallback(userGroupCallback)
.addAsset(ResourceFactory.newClassPathResource("BPMN2-ScriptTask.bpmn2"), ResourceType.BPMN2)
.addAsset(ResourceFactory.newClassPathResource("BPMN2-CallActivity.bpmn2"), ResourceType.BPMN2)
.addEnvironmentEntry("ExecutorService", executorService)
.registerableItemsFactory(new DefaultRegisterableItemsFactory() {
@Override
public Map<String, WorkItemHandler> getWorkItemHandlers(RuntimeEngine runtime) {
Map<String, WorkItemHandler> handlers = super.getWorkItemHandlers(runtime);
handlers.put("async", new SystemOutWorkItemHandler());
return handlers;
}
@Override
public List<ProcessEventListener> getProcessEventListeners( RuntimeEngine runtime) {
List<ProcessEventListener> listeners = super.getProcessEventListeners(runtime);
listeners.add(countDownListener);
return listeners;
}
})
.get();
manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment);
assertNotNull(manager);
RuntimeEngine runtime = manager.getRuntimeEngine(EmptyContext.get());
KieSession ksession = runtime.getKieSession();
assertNotNull(ksession);
ProcessInstance processInstance = ksession.startProcess("ParentProcess");
assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
long processInstanceId = processInstance.getId();
// make sure that waiting for event process is not finished yet as it must be through executor/async
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNotNull(processInstance);
countDownListener.waitTillCompleted();
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNull(processInstance);
List<? extends NodeInstanceLog> logs = runtime.getAuditService().findNodeInstances(processInstanceId);
assertNotNull(logs);
assertEquals(6, logs.size());
}
/**
 * Mix of async and sync service tasks: the process
 * "async-cont.async-service-task" alternates async continuations with
 * synchronous service task invocations. The latch is re-armed (same node,
 * count 1) after each async step; the instance completes after the third wait.
 */
@Test(timeout=10000)
public void testAsyncAndSyncServiceTasks() throws Exception {
// counts down each time the "Async Service" node is triggered
final CountDownProcessEventListener countDownListener = new CountDownProcessEventListener("Async Service", 1);
RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get().newDefaultBuilder()
.userGroupCallback(userGroupCallback)
.addAsset(ResourceFactory.newClassPathResource("BPMN2-AsyncServiceTask.bpmn2"), ResourceType.BPMN2)
.addEnvironmentEntry("ExecutorService", executorService)
.registerableItemsFactory(new DefaultRegisterableItemsFactory() {
@Override
public Map<String, WorkItemHandler> getWorkItemHandlers(RuntimeEngine runtime) {
Map<String, WorkItemHandler> handlers = super.getWorkItemHandlers(runtime);
handlers.put("async", new SystemOutWorkItemHandler());
// synchronous service task invocations go through ServiceTaskHandler
handlers.put("Service Task", new ServiceTaskHandler());
return handlers;
}
@Override
public List<ProcessEventListener> getProcessEventListeners( RuntimeEngine runtime) {
List<ProcessEventListener> listeners = super.getProcessEventListeners(runtime);
listeners.add(countDownListener);
return listeners;
}
})
.get();
manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment);
assertNotNull(manager);
RuntimeEngine runtime = manager.getRuntimeEngine(EmptyContext.get());
KieSession ksession = runtime.getKieSession();
assertNotNull(ksession);
Map<String, Object> params = new HashMap<String, Object>();
params.put("name", "john");
ProcessInstance processInstance = ksession.startProcess("async-cont.async-service-task", params);
assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
long processInstanceId = processInstance.getId();
// make sure that waiting for event process is not finished yet as it must be through executor/async
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNotNull(processInstance);
countDownListener.waitTillCompleted();
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNotNull(processInstance);
// re-arm and wait for the second async step; instance still active
countDownListener.reset(1);
countDownListener.waitTillCompleted();
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNotNull(processInstance);
// third async step drives the process to completion
countDownListener.reset(1);
countDownListener.waitTillCompleted();
processInstance = runtime.getKieSession().getProcessInstance(processInstance.getId());
assertNull(processInstance);
List<? extends NodeInstanceLog> logs = runtime.getAuditService().findNodeInstances(processInstanceId);
assertNotNull(logs);
assertEquals(14, logs.size());
}
/**
 * Verifies that queued async jobs belonging to a closed (undeployed) runtime
 * manager are NOT executed while the deployment is missing: after closing
 * the manager the job stays queued, and only once a manager with the same
 * identifier ("special-test-case") is recreated does the job run and
 * complete the process instance.
 */
@Test(timeout=10000)
public void testAsyncScriptTaskIgnoreNotExistingDeployments() throws Exception {
// counts down when the "Hello" node is triggered
final CountDownProcessEventListener countDownListener = new CountDownProcessEventListener("Hello", 1);
RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get().newDefaultBuilder()
.userGroupCallback(userGroupCallback)
.addAsset(ResourceFactory.newClassPathResource("BPMN2-AsyncScriptTask.bpmn2"), ResourceType.BPMN2)
.addEnvironmentEntry("ExecutorService", executorService)
.registerableItemsFactory(new DefaultRegisterableItemsFactory() {
@Override
public Map<String, WorkItemHandler> getWorkItemHandlers(RuntimeEngine runtime) {
Map<String, WorkItemHandler> handlers = super.getWorkItemHandlers(runtime);
handlers.put("async", new SystemOutWorkItemHandler());
return handlers;
}
@Override
public List<ProcessEventListener> getProcessEventListeners( RuntimeEngine runtime) {
List<ProcessEventListener> listeners = super.getProcessEventListeners(runtime);
listeners.add(countDownListener);
return listeners;
}
})
.get();
// explicit deployment id so we can recreate the very same deployment later
manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment, "special-test-case");
assertNotNull(manager);
RuntimeEngine runtime = manager.getRuntimeEngine(EmptyContext.get());
KieSession ksession = runtime.getKieSession();
assertNotNull(ksession);
ProcessInstance processInstance = ksession.startProcess("AsyncScriptTask");
assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
long processInstanceId = processInstance.getId();
// make sure that waiting for event process is not finished yet as it must be through executor/async
processInstance = runtime.getKieSession().getProcessInstance(processInstanceId);
assertNotNull(processInstance);
// close the manager before the async job runs — the deployment is now gone
manager.close();
List<RequestInfo> queued = executorService.getQueuedRequests(new QueryContext());
assertNotNull(queued);
assertEquals(1, queued.size());
assertEquals(AsyncSignalEventCommand.class.getName(), queued.get(0).getCommandName());
// give the executor a chance to (incorrectly) pick the job up; it must remain queued
countDownListener.waitTillCompleted(2000);
queued = executorService.getQueuedRequests(new QueryContext());
assertNotNull(queued);
assertEquals(1, queued.size());
assertEquals(AsyncSignalEventCommand.class.getName(), queued.get(0).getCommandName());
// recreate the deployment under the same id — the queued job can now execute
manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment, "special-test-case");
assertNotNull(manager);
runtime = manager.getRuntimeEngine(EmptyContext.get());
countDownListener.reset(1);
countDownListener.waitTillCompleted();
processInstance = runtime.getKieSession().getProcessInstance(processInstanceId);
assertNull(processInstance);
List<? extends NodeInstanceLog> logs = runtime.getAuditService().findNodeInstances(processInstanceId);
assertNotNull(logs);
assertEquals(8, logs.size());
}
/**
 * AsyncMode=true: every node of a plain script-task process is executed as a
 * separate executor job even though the definition itself is not async.
 * Expects 3 completed executor commands, all AsyncSignalEventCommand.
 */
@Test(timeout=10000)
public void testAsyncModeWithScriptTask() throws Exception {
// counts down when the "EndProcess" node is triggered
final CountDownProcessEventListener countDownListener = new CountDownProcessEventListener("EndProcess", 1);
RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get().newDefaultBuilder()
.userGroupCallback(userGroupCallback)
.addAsset(ResourceFactory.newClassPathResource("BPMN2-ScriptTask.bpmn2"), ResourceType.BPMN2)
.addEnvironmentEntry("ExecutorService", executorService)
.addEnvironmentEntry("AsyncMode", "true")
.registerableItemsFactory(new DefaultRegisterableItemsFactory() {
@Override
public Map<String, WorkItemHandler> getWorkItemHandlers(RuntimeEngine runtime) {
Map<String, WorkItemHandler> handlers = super.getWorkItemHandlers(runtime);
handlers.put("async", new SystemOutWorkItemHandler());
return handlers;
}
@Override
public List<ProcessEventListener> getProcessEventListeners( RuntimeEngine runtime) {
List<ProcessEventListener> listeners = super.getProcessEventListeners(runtime);
listeners.add(countDownListener);
return listeners;
}
})
.get();
manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment);
assertNotNull(manager);
RuntimeEngine runtime = manager.getRuntimeEngine(EmptyContext.get());
KieSession ksession = runtime.getKieSession();
assertNotNull(ksession);
ProcessInstance processInstance = ksession.startProcess("ScriptTask");
assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
long processInstanceId = processInstance.getId();
// make sure that waiting for event process is not finished yet as it must be through executor/async
processInstance = runtime.getKieSession().getProcessInstance(processInstanceId);
assertNotNull(processInstance);
countDownListener.waitTillCompleted();
processInstance = runtime.getKieSession().getProcessInstance(processInstanceId);
assertNull(processInstance);
List<? extends NodeInstanceLog> logs = runtime.getAuditService().findNodeInstances(processInstanceId);
assertNotNull(logs);
assertEquals(8, logs.size());
List<RequestInfo> completed = executorService.getCompletedRequests(new QueryContext());
// there should be 3 completed commands (for script, for task and end node)
assertEquals(3, completed.size());
Set<String> commands = completed.stream().map(RequestInfo::getCommandName).collect(Collectors.toSet());
assertEquals(1, commands.size());
assertEquals(AsyncSignalEventCommand.class.getName(), commands.iterator().next());
}
/**
 * AsyncMode=true combined with a process that is itself marked async
 * (BPMN2-AsyncScriptTask): behaves the same as the plain script-task case —
 * 3 completed executor commands, all AsyncSignalEventCommand.
 */
@Test(timeout=10000)
public void testAsyncModeWithAsyncScriptTask() throws Exception {
// counts down when the "EndProcess" node is triggered
final CountDownProcessEventListener countDownListener = new CountDownProcessEventListener("EndProcess", 1);
RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get().newDefaultBuilder()
.userGroupCallback(userGroupCallback)
.addAsset(ResourceFactory.newClassPathResource("BPMN2-AsyncScriptTask.bpmn2"), ResourceType.BPMN2)
.addEnvironmentEntry("ExecutorService", executorService)
.addEnvironmentEntry("AsyncMode", "true")
.registerableItemsFactory(new DefaultRegisterableItemsFactory() {
@Override
public Map<String, WorkItemHandler> getWorkItemHandlers(RuntimeEngine runtime) {
Map<String, WorkItemHandler> handlers = super.getWorkItemHandlers(runtime);
handlers.put("async", new SystemOutWorkItemHandler());
return handlers;
}
@Override
public List<ProcessEventListener> getProcessEventListeners(RuntimeEngine runtime) {
List<ProcessEventListener> listeners = super.getProcessEventListeners(runtime);
listeners.add(countDownListener);
return listeners;
}
})
.get();
manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment);
assertNotNull(manager);
RuntimeEngine runtime = manager.getRuntimeEngine(EmptyContext.get());
KieSession ksession = runtime.getKieSession();
assertNotNull(ksession);
ProcessInstance processInstance = ksession.startProcess("AsyncScriptTask");
assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
long processInstanceId = processInstance.getId();
// make sure that waiting for event process is not finished yet as it must be through executor/async
processInstance = runtime.getKieSession().getProcessInstance(processInstanceId);
assertNotNull(processInstance);
countDownListener.waitTillCompleted();
processInstance = runtime.getKieSession().getProcessInstance(processInstanceId);
assertNull(processInstance);
List<? extends NodeInstanceLog> logs = runtime.getAuditService().findNodeInstances(processInstanceId);
assertNotNull(logs);
assertEquals(8, logs.size());
List<RequestInfo> completed = executorService.getCompletedRequests(new QueryContext());
// there should be 3 completed commands (for script, for task and end node)
assertEquals(3, completed.size());
Set<String> commands = completed.stream().map(RequestInfo::getCommandName).collect(Collectors.toSet());
assertEquals(1, commands.size());
assertEquals(AsyncSignalEventCommand.class.getName(), commands.iterator().next());
}
/**
 * AsyncMode=true with a service task process (BPMN2-ServiceProcess):
 * the service task and the end node each run as an executor job —
 * 2 completed commands expected, all AsyncSignalEventCommand.
 */
@Test(timeout=10000)
public void testAsyncModeWithServiceTask() throws Exception {
// counts down when the "EndProcess" node is triggered
final CountDownProcessEventListener countDownListener = new CountDownProcessEventListener("EndProcess", 1);
RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get().newDefaultBuilder()
.userGroupCallback(userGroupCallback)
.addAsset(ResourceFactory.newClassPathResource("BPMN2-ServiceProcess.bpmn2"), ResourceType.BPMN2)
.addEnvironmentEntry("ExecutorService", executorService)
.addEnvironmentEntry("AsyncMode", "true")
.registerableItemsFactory(new DefaultRegisterableItemsFactory() {
@Override
public Map<String, WorkItemHandler> getWorkItemHandlers(RuntimeEngine runtime) {
Map<String, WorkItemHandler> handlers = super.getWorkItemHandlers(runtime);
handlers.put("Service Task", new ServiceTaskHandler());
return handlers;
}
@Override
public List<ProcessEventListener> getProcessEventListeners(RuntimeEngine runtime) {
List<ProcessEventListener> listeners = super.getProcessEventListeners(runtime);
listeners.add(countDownListener);
return listeners;
}
})
.get();
manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment);
assertNotNull(manager);
RuntimeEngine runtime = manager.getRuntimeEngine(EmptyContext.get());
KieSession ksession = runtime.getKieSession();
assertNotNull(ksession);
ProcessInstance processInstance = ksession.startProcess("ServiceProcess");
assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
long processInstanceId = processInstance.getId();
// make sure that waiting for event process is not finished yet as it must be through executor/async
processInstance = runtime.getKieSession().getProcessInstance(processInstanceId);
assertNotNull(processInstance);
countDownListener.waitTillCompleted();
processInstance = runtime.getKieSession().getProcessInstance(processInstanceId);
assertNull(processInstance);
List<? extends NodeInstanceLog> logs = runtime.getAuditService().findNodeInstances(processInstanceId);
assertNotNull(logs);
assertEquals(6, logs.size());
List<RequestInfo> completed = executorService.getCompletedRequests(new QueryContext());
// there should be 2 completed commands (for service task and end node)
assertEquals(2, completed.size());
Set<String> commands = completed.stream().map(RequestInfo::getCommandName).collect(Collectors.toSet());
assertEquals(1, commands.size());
assertEquals(AsyncSignalEventCommand.class.getName(), commands.iterator().next());
}
/**
 * AsyncMode=true with an embedded sub process (BPMN2-SubProcess): every node
 * inside and around the sub process is turned into an executor job —
 * 7 completed commands expected, all AsyncSignalEventCommand.
 */
@Test(timeout=10000)
public void testAsyncModeWithSubProcess() throws Exception {
// counts down when the "EndProcess" node is triggered
final CountDownProcessEventListener countDownListener = new CountDownProcessEventListener("EndProcess", 1);
RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get().newDefaultBuilder()
.userGroupCallback(userGroupCallback)
.addAsset(ResourceFactory.newClassPathResource("BPMN2-SubProcess.bpmn2"), ResourceType.BPMN2)
.addEnvironmentEntry("ExecutorService", executorService)
.addEnvironmentEntry("AsyncMode", "true")
.registerableItemsFactory(new DefaultRegisterableItemsFactory() {
@Override
public List<ProcessEventListener> getProcessEventListeners(RuntimeEngine runtime) {
List<ProcessEventListener> listeners = super.getProcessEventListeners(runtime);
listeners.add(countDownListener);
return listeners;
}
})
.get();
manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment);
assertNotNull(manager);
RuntimeEngine runtime = manager.getRuntimeEngine(EmptyContext.get());
KieSession ksession = runtime.getKieSession();
assertNotNull(ksession);
ProcessInstance processInstance = ksession.startProcess("SubProcess");
assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
long processInstanceId = processInstance.getId();
// make sure that waiting for event process is not finished yet as it must be through executor/async
processInstance = runtime.getKieSession().getProcessInstance(processInstanceId);
assertNotNull(processInstance);
countDownListener.waitTillCompleted();
processInstance = runtime.getKieSession().getProcessInstance(processInstanceId);
assertNull(processInstance);
List<? extends NodeInstanceLog> logs = runtime.getAuditService().findNodeInstances(processInstanceId);
assertNotNull(logs);
assertEquals(18, logs.size());
List<RequestInfo> completed = executorService.getCompletedRequests(new QueryContext());
// there should be 7 completed commands (subprocess node itself, 3 inner script tasks, subprocess end node, outer script task, process end node)
assertEquals(7, completed.size());
Set<String> commands = completed.stream().map(RequestInfo::getCommandName).collect(Collectors.toSet());
assertEquals(1, commands.size());
assertEquals(AsyncSignalEventCommand.class.getName(), commands.iterator().next());
}
/**
 * AsyncMode=true with a process waiting on an intermediate signal catch event
 * (BPMN2-WaitForEvent). Verifies that the "Signal" node is activated
 * asynchronously before the signal is sent, and that sending "MySignal" then
 * drives the instance to completion through the executor — 3 completed
 * commands expected, all AsyncSignalEventCommand.
 */
@Test(timeout=10000)
public void testAsyncModeWithSignalProcess() throws Exception {
    // fires when the "Signal" catch node is triggered (async flavor of the listener)
    final CountDownProcessEventListener countDownListenerSignalAsync = new CountDownProcessEventListener("Signal", 1, true);
    // fires when the "EndProcess" node is triggered
    final CountDownProcessEventListener countDownListener = new CountDownProcessEventListener("EndProcess", 1);
    RuntimeEnvironment environment = RuntimeEnvironmentBuilder.Factory.get().newDefaultBuilder()
            .userGroupCallback(userGroupCallback)
            .addAsset(ResourceFactory.newClassPathResource("BPMN2-WaitForEvent.bpmn2"), ResourceType.BPMN2)
            .addEnvironmentEntry("ExecutorService", executorService)
            .addEnvironmentEntry("AsyncMode", "true")
            .registerableItemsFactory(new DefaultRegisterableItemsFactory() {
                @Override
                public List<ProcessEventListener> getProcessEventListeners(RuntimeEngine runtime) {
                    List<ProcessEventListener> listeners = super.getProcessEventListeners(runtime);
                    listeners.add(countDownListener);
                    listeners.add(countDownListenerSignalAsync);
                    return listeners;
                }
            })
            .get();
    manager = RuntimeManagerFactory.Factory.get().newSingletonRuntimeManager(environment);
    assertNotNull(manager);
    RuntimeEngine runtime = manager.getRuntimeEngine(EmptyContext.get());
    KieSession ksession = runtime.getKieSession();
    assertNotNull(ksession);
    ProcessInstance processInstance = ksession.startProcess("WaitForEvent");
    assertEquals(ProcessInstance.STATE_ACTIVE, processInstance.getState());
    long processInstanceId = processInstance.getId();
    // make sure that waiting for event process is not finished yet as it must be signalled first
    processInstance = runtime.getKieSession().getProcessInstance(processInstanceId);
    assertNotNull(processInstance);
    // wait for the signal not to be triggered in async way before sending signal
    countDownListenerSignalAsync.waitTillCompleted();
    // send the signal the catch event is waiting for
    // (debug System.out.println removed — tests should not write to stdout)
    runtime.getKieSession().signalEvent("MySignal", null);
    countDownListener.waitTillCompleted();
    processInstance = runtime.getKieSession().getProcessInstance(processInstanceId);
    assertNull(processInstance);
    List<? extends NodeInstanceLog> logs = runtime.getAuditService().findNodeInstances(processInstanceId);
    assertNotNull(logs);
    assertEquals(8, logs.size());
    List<RequestInfo> completed = executorService.getCompletedRequests(new QueryContext());
    // there should be 3 completed commands (for intermediate catch event, script task and end node)
    assertEquals(3, completed.size());
    Set<String> commands = completed.stream().map(RequestInfo::getCommandName).collect(Collectors.toSet());
    assertEquals(1, commands.size());
    assertEquals(AsyncSignalEventCommand.class.getName(), commands.iterator().next());
}
/**
 * Creates and initializes the jBPM executor service backed by the
 * "org.jbpm.executor" persistence unit, polling at {@code delay} milliseconds,
 * and gives its worker threads a moment to start before returning.
 * Side effects: assigns the {@code emf} and {@code executorService} fields.
 */
private ExecutorService buildExecutorService() {
    emf = Persistence.createEntityManagerFactory("org.jbpm.executor");
    executorService = ExecutorServiceFactory.newExecutorService(emf);
    executorService.setInterval((int) delay);
    executorService.setTimeunit(TimeUnit.MILLISECONDS);
    executorService.init();
    // let the executor start worker threads
    try {
        Thread.sleep(1500);
    } catch (InterruptedException e) {
        // don't swallow the interrupt — restore the flag so callers can observe it
        Thread.currentThread().interrupt();
    }
    return executorService;
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.spi.block;
import org.openjdk.jol.info.ClassLayout;
import javax.annotation.Nullable;
import java.util.Optional;
import java.util.function.BiConsumer;
import static com.facebook.presto.spi.block.BlockUtil.checkArrayRange;
import static com.facebook.presto.spi.block.BlockUtil.checkValidRegion;
import static com.facebook.presto.spi.block.BlockUtil.compactArray;
import static io.airlift.slice.SizeOf.sizeOf;
import static java.lang.Math.toIntExact;
/**
 * Immutable {@link Block} of fixed-width 64-bit values backed by a
 * {@code long[]}, with an optional parallel {@code boolean[]} null mask.
 * Region views ({@link #getRegion}) share the underlying arrays and only
 * shift the offset; {@link #copyRegion} and {@link #copyPositions} produce
 * compact copies.
 */
public class LongArrayBlock
        implements Block
{
    private static final int INSTANCE_SIZE = ClassLayout.parseClass(LongArrayBlock.class).instanceSize();

    // index of logical position 0 within values/valueIsNull; non-zero for region views
    private final int arrayOffset;
    private final int positionCount;
    // null mask; a null reference means "this block contains no nulls"
    @Nullable
    private final boolean[] valueIsNull;
    private final long[] values;

    private final long sizeInBytes;
    private final long retainedSizeInBytes;

    /**
     * Creates a block over the first {@code positionCount} entries of {@code values}.
     *
     * @param valueIsNull optional per-position null flags; empty means no nulls
     */
    public LongArrayBlock(int positionCount, Optional<boolean[]> valueIsNull, long[] values)
    {
        this(0, positionCount, valueIsNull.orElse(null), values);
    }

    // Package-private view constructor: the arrays are NOT copied, so callers
    // must not mutate them after handing them over.
    LongArrayBlock(int arrayOffset, int positionCount, boolean[] valueIsNull, long[] values)
    {
        if (arrayOffset < 0) {
            throw new IllegalArgumentException("arrayOffset is negative");
        }
        this.arrayOffset = arrayOffset;
        if (positionCount < 0) {
            throw new IllegalArgumentException("positionCount is negative");
        }
        this.positionCount = positionCount;
        if (values.length - arrayOffset < positionCount) {
            throw new IllegalArgumentException("values length is less than positionCount");
        }
        this.values = values;
        if (valueIsNull != null && valueIsNull.length - arrayOffset < positionCount) {
            throw new IllegalArgumentException("isNull length is less than positionCount");
        }
        this.valueIsNull = valueIsNull;

        // logical size is 8 bytes per value + 1 byte per null flag, whether or
        // not the null mask is actually allocated
        sizeInBytes = (Long.BYTES + Byte.BYTES) * (long) positionCount;
        retainedSizeInBytes = INSTANCE_SIZE + sizeOf(valueIsNull) + sizeOf(values);
    }

    @Override
    public long getSizeInBytes()
    {
        return sizeInBytes;
    }

    @Override
    public long getRegionSizeInBytes(int position, int length)
    {
        // validate the region like getRegion/copyRegion do, instead of silently
        // accepting out-of-range arguments
        checkValidRegion(getPositionCount(), position, length);
        return (Long.BYTES + Byte.BYTES) * (long) length;
    }

    @Override
    public long getRetainedSizeInBytes()
    {
        return retainedSizeInBytes;
    }

    @Override
    public void retainedBytesForEachPart(BiConsumer<Object, Long> consumer)
    {
        consumer.accept(values, sizeOf(values));
        if (valueIsNull != null) {
            consumer.accept(valueIsNull, sizeOf(valueIsNull));
        }
        consumer.accept(this, (long) INSTANCE_SIZE);
    }

    @Override
    public int getPositionCount()
    {
        return positionCount;
    }

    @Override
    public long getLong(int position, int offset)
    {
        checkReadablePosition(position);
        if (offset != 0) {
            throw new IllegalArgumentException("offset must be zero");
        }
        return values[position + arrayOffset];
    }

    @Override
    @Deprecated
    // TODO: Remove when we fix intermediate types on aggregations.
    public int getInt(int position, int offset)
    {
        checkReadablePosition(position);
        if (offset != 0) {
            throw new IllegalArgumentException("offset must be zero");
        }
        // throws ArithmeticException if the value does not fit in an int
        return toIntExact(values[position + arrayOffset]);
    }

    @Override
    @Deprecated
    // TODO: Remove when we fix intermediate types on aggregations.
    public short getShort(int position, int offset)
    {
        checkReadablePosition(position);
        if (offset != 0) {
            throw new IllegalArgumentException("offset must be zero");
        }
        short value = (short) (values[position + arrayOffset]);
        // narrowing must be lossless; mirror toIntExact semantics for short
        if (value != values[position + arrayOffset]) {
            throw new ArithmeticException("short overflow");
        }
        return value;
    }

    @Override
    @Deprecated
    // TODO: Remove when we fix intermediate types on aggregations.
    public byte getByte(int position, int offset)
    {
        checkReadablePosition(position);
        if (offset != 0) {
            throw new IllegalArgumentException("offset must be zero");
        }
        byte value = (byte) (values[position + arrayOffset]);
        // narrowing must be lossless; mirror toIntExact semantics for byte
        if (value != values[position + arrayOffset]) {
            throw new ArithmeticException("byte overflow");
        }
        return value;
    }

    @Override
    public boolean mayHaveNull()
    {
        return valueIsNull != null;
    }

    @Override
    public boolean isNull(int position)
    {
        checkReadablePosition(position);
        return valueIsNull != null && valueIsNull[position + arrayOffset];
    }

    @Override
    public void writePositionTo(int position, BlockBuilder blockBuilder)
    {
        checkReadablePosition(position);
        blockBuilder.writeLong(values[position + arrayOffset]);
        blockBuilder.closeEntry();
    }

    @Override
    public Block getSingleValueBlock(int position)
    {
        checkReadablePosition(position);
        return new LongArrayBlock(
                0,
                1,
                isNull(position) ? new boolean[] {true} : null,
                new long[] {values[position + arrayOffset]});
    }

    @Override
    public Block copyPositions(int[] positions, int offset, int length)
    {
        checkArrayRange(positions, offset, length);
        boolean[] newValueIsNull = null;
        if (valueIsNull != null) {
            newValueIsNull = new boolean[length];
        }
        long[] newValues = new long[length];
        for (int i = 0; i < length; i++) {
            int position = positions[offset + i];
            checkReadablePosition(position);
            if (valueIsNull != null) {
                newValueIsNull[i] = valueIsNull[position + arrayOffset];
            }
            newValues[i] = values[position + arrayOffset];
        }
        return new LongArrayBlock(0, length, newValueIsNull, newValues);
    }

    @Override
    public Block getRegion(int positionOffset, int length)
    {
        checkValidRegion(getPositionCount(), positionOffset, length);
        // zero-copy view: the new block shares the arrays with a shifted offset
        return new LongArrayBlock(positionOffset + arrayOffset, length, valueIsNull, values);
    }

    @Override
    public Block copyRegion(int positionOffset, int length)
    {
        checkValidRegion(getPositionCount(), positionOffset, length);
        positionOffset += arrayOffset;
        boolean[] newValueIsNull = valueIsNull == null ? null : compactArray(valueIsNull, positionOffset, length);
        long[] newValues = compactArray(values, positionOffset, length);
        // compactArray returns the same array when it is already compact; in
        // that case this block itself is the most compact representation
        if (newValueIsNull == valueIsNull && newValues == values) {
            return this;
        }
        return new LongArrayBlock(0, length, newValueIsNull, newValues);
    }

    @Override
    public String getEncodingName()
    {
        return LongArrayBlockEncoding.NAME;
    }

    @Override
    public String toString()
    {
        StringBuilder sb = new StringBuilder("LongArrayBlock{");
        sb.append("positionCount=").append(getPositionCount());
        sb.append('}');
        return sb.toString();
    }

    // Bounds check shared by every positional accessor.
    private void checkReadablePosition(int position)
    {
        if (position < 0 || position >= getPositionCount()) {
            throw new IllegalArgumentException("position is not valid");
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.IgniteSystemProperties;
import org.apache.ignite.cache.store.CacheStoreSessionListener;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.configuration.TransactionConfiguration;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.managers.communication.GridIoManager;
import org.apache.ignite.internal.managers.deployment.GridDeploymentManager;
import org.apache.ignite.internal.managers.discovery.GridDiscoveryManager;
import org.apache.ignite.internal.managers.eventstorage.GridEventStorageManager;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.jta.CacheJtaManagerAdapter;
import org.apache.ignite.internal.processors.cache.store.CacheStoreManager;
import org.apache.ignite.internal.processors.cache.transactions.IgniteInternalTx;
import org.apache.ignite.internal.processors.cache.transactions.IgniteTxManager;
import org.apache.ignite.internal.processors.cache.transactions.TransactionMetricsAdapter;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersionManager;
import org.apache.ignite.internal.processors.timeout.GridTimeoutProcessor;
import org.apache.ignite.internal.util.GridLongList;
import org.apache.ignite.internal.util.future.GridCompoundFuture;
import org.apache.ignite.internal.util.future.GridFinishedFuture;
import org.apache.ignite.internal.util.tostring.GridToStringExclude;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.lang.IgniteFuture;
import org.apache.ignite.marshaller.Marshaller;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.IgniteSystemProperties.IGNITE_LOCAL_STORE_KEEPS_PRIMARY_ONLY;
/**
 * Shared context for all caches of a node: aggregates the cache-level managers
 * (transactions, MVCC, versions, deployment, exchange, affinity, IO) and the
 * per-cache {@link GridCacheContext} registry.
 */
@GridToStringExclude
public class GridCacheSharedContext<K, V> {
    /** Kernal context. */
    private GridKernalContext kernalCtx;
    /** Managers in starting order. */
    private List<GridCacheSharedManager<K, V>> mgrs = new LinkedList<>();
    /** Cache transaction manager. */
    private IgniteTxManager txMgr;
    /** JTA manager. */
    private CacheJtaManagerAdapter jtaMgr;
    /** Partition exchange manager. */
    private GridCachePartitionExchangeManager<K, V> exchMgr;
    /** Version manager. */
    private GridCacheVersionManager verMgr;
    /** Lock manager. */
    private GridCacheMvccManager mvccMgr;
    /** IO Manager. */
    private GridCacheIoManager ioMgr;
    /** Deployment manager. */
    private GridCacheDeploymentManager<K, V> depMgr;
    /** Affinity manager. */
    private CacheAffinitySharedManager affMgr;
    /** Cache contexts map, keyed by cache ID. */
    private ConcurrentMap<Integer, GridCacheContext<K, V>> ctxMap;
    /** Tx metrics. Volatile because {@link #resetTxMetrics()} replaces the instance. */
    private volatile TransactionMetricsAdapter txMetrics;
    /** Store session listeners. */
    private Collection<CacheStoreSessionListener> storeSesLsnrs;
    /** Count of caches with a configured local store (maintained by add/removeCacheContext). */
    private final AtomicInteger locStoreCnt;
    /** Indicating whether local store keeps primary only. */
    private final boolean locStorePrimaryOnly = IgniteSystemProperties.getBoolean(IGNITE_LOCAL_STORE_KEEPS_PRIMARY_ONLY);
    /** Logger for general cache messages. */
    private final IgniteLogger msgLog;
    /** Logger for atomic cache messages. */
    private final IgniteLogger atomicMsgLog;
    /** Logger for transaction prepare messages. */
    private final IgniteLogger txPrepareMsgLog;
    /** Logger for transaction finish messages. */
    private final IgniteLogger txFinishMsgLog;
    /** Logger for transaction lock messages. */
    private final IgniteLogger txLockMsgLog;
    /** Logger for transaction recovery messages. */
    private final IgniteLogger txRecoveryMsgLog;
    /**
     * @param kernalCtx Context.
     * @param txMgr Transaction manager.
     * @param verMgr Version manager.
     * @param mvccMgr MVCC manager.
     * @param depMgr Deployment manager.
     * @param exchMgr Exchange manager.
     * @param affMgr Affinity manager.
     * @param ioMgr IO manager.
     * @param jtaMgr JTA manager.
     * @param storeSesLsnrs Store session listeners.
     */
    public GridCacheSharedContext(
        GridKernalContext kernalCtx,
        IgniteTxManager txMgr,
        GridCacheVersionManager verMgr,
        GridCacheMvccManager mvccMgr,
        GridCacheDeploymentManager<K, V> depMgr,
        GridCachePartitionExchangeManager<K, V> exchMgr,
        CacheAffinitySharedManager<K, V> affMgr,
        GridCacheIoManager ioMgr,
        CacheJtaManagerAdapter jtaMgr,
        Collection<CacheStoreSessionListener> storeSesLsnrs
    ) {
        this.kernalCtx = kernalCtx;
        setManagers(mgrs, txMgr, jtaMgr, verMgr, mvccMgr, depMgr, exchMgr, affMgr, ioMgr);
        this.storeSesLsnrs = storeSesLsnrs;
        txMetrics = new TransactionMetricsAdapter();
        ctxMap = new ConcurrentHashMap<>();
        locStoreCnt = new AtomicInteger();
        msgLog = kernalCtx.log(CU.CACHE_MSG_LOG_CATEGORY);
        atomicMsgLog = kernalCtx.log(CU.ATOMIC_MSG_LOG_CATEGORY);
        txPrepareMsgLog = kernalCtx.log(CU.TX_MSG_PREPARE_LOG_CATEGORY);
        txFinishMsgLog = kernalCtx.log(CU.TX_MSG_FINISH_LOG_CATEGORY);
        txLockMsgLog = kernalCtx.log(CU.TX_MSG_LOCK_LOG_CATEGORY);
        txRecoveryMsgLog = kernalCtx.log(CU.TX_MSG_RECOVERY_LOG_CATEGORY);
    }
    /**
     * @return Logger for general cache messages.
     */
    public IgniteLogger messageLogger() {
        return msgLog;
    }
    /**
     * @return Logger for atomic cache messages.
     */
    public IgniteLogger atomicMessageLogger() {
        return atomicMsgLog;
    }
    /**
     * @return Logger for transaction prepare messages.
     */
    public IgniteLogger txPrepareMessageLogger() {
        return txPrepareMsgLog;
    }
    /**
     * @return Logger for transaction finish messages.
     */
    public IgniteLogger txFinishMessageLogger() {
        return txFinishMsgLog;
    }
    /**
     * @return Logger for transaction lock messages.
     */
    public IgniteLogger txLockMessageLogger() {
        return txLockMsgLog;
    }
    /**
     * @return Logger for transaction recovery messages.
     */
    public IgniteLogger txRecoveryMessageLogger() {
        return txRecoveryMsgLog;
    }
    /**
     * Invoked on client disconnect: notifies every manager (in reverse starting order)
     * and then stops the managers that will be re-created on reconnect.
     *
     * @param reconnectFut Reconnect future.
     * @throws IgniteCheckedException If failed.
     */
    void onDisconnected(IgniteFuture<?> reconnectFut) throws IgniteCheckedException {
        // First pass: notify all managers, kernal-stop the restartable ones.
        for (ListIterator<? extends GridCacheSharedManager<?, ?>> it = mgrs.listIterator(mgrs.size());
            it.hasPrevious();) {
            GridCacheSharedManager<?, ?> mgr = it.previous();
            mgr.onDisconnected(reconnectFut);
            if (restartOnDisconnect(mgr))
                mgr.onKernalStop(true);
        }
        // Second pass: fully stop restartable managers (again in reverse starting order).
        for (ListIterator<? extends GridCacheSharedManager<?, ?>> it = mgrs.listIterator(mgrs.size()); it.hasPrevious();) {
            GridCacheSharedManager<?, ?> mgr = it.previous();
            if (restartOnDisconnect(mgr))
                mgr.stop(true);
        }
    }
    /**
     * Invoked on client reconnect: rebuilds the manager list with fresh deployment and
     * exchange managers (the ones stopped in {@link #onDisconnected}) and restarts them.
     *
     * @throws IgniteCheckedException If failed.
     */
    void onReconnected() throws IgniteCheckedException {
        List<GridCacheSharedManager<K, V>> mgrs = new LinkedList<>();
        setManagers(mgrs, txMgr,
            jtaMgr,
            verMgr,
            mvccMgr,
            new GridCacheDeploymentManager<K, V>(),
            new GridCachePartitionExchangeManager<K, V>(),
            affMgr,
            ioMgr);
        this.mgrs = mgrs;
        for (GridCacheSharedManager<K, V> mgr : mgrs) {
            if (restartOnDisconnect(mgr))
                mgr.start(this);
        }
        for (GridCacheSharedManager<?, ?> mgr : mgrs)
            mgr.onKernalStart(true);
    }
    /**
     * @param mgr Manager.
     * @return {@code True} if manager is restarted on reconnect.
     */
    private boolean restartOnDisconnect(GridCacheSharedManager<?, ?> mgr) {
        return mgr instanceof GridCacheDeploymentManager || mgr instanceof GridCachePartitionExchangeManager;
    }
    /**
     * Assigns the manager fields and registers each non-null manager in {@code mgrs}
     * (registration order here defines the starting order).
     *
     * @param mgrs Managers list.
     * @param txMgr Transaction manager.
     * @param verMgr Version manager.
     * @param mvccMgr MVCC manager.
     * @param depMgr Deployment manager.
     * @param exchMgr Exchange manager.
     * @param affMgr Affinity manager.
     * @param ioMgr IO manager.
     * @param jtaMgr JTA manager.
     */
    private void setManagers(List<GridCacheSharedManager<K, V>> mgrs,
        IgniteTxManager txMgr,
        CacheJtaManagerAdapter jtaMgr,
        GridCacheVersionManager verMgr,
        GridCacheMvccManager mvccMgr,
        GridCacheDeploymentManager<K, V> depMgr,
        GridCachePartitionExchangeManager<K, V> exchMgr,
        CacheAffinitySharedManager affMgr,
        GridCacheIoManager ioMgr) {
        this.mvccMgr = add(mgrs, mvccMgr);
        this.verMgr = add(mgrs, verMgr);
        this.txMgr = add(mgrs, txMgr);
        this.jtaMgr = add(mgrs, jtaMgr);
        this.depMgr = add(mgrs, depMgr);
        this.exchMgr = add(mgrs, exchMgr);
        this.affMgr = add(mgrs, affMgr);
        this.ioMgr = add(mgrs, ioMgr);
    }
    /**
     * Gets all cache contexts for local node.
     *
     * @return Collection of all cache contexts.
     */
    public Collection<GridCacheContext> cacheContexts() {
        // Raw cast drops the <K, V> parameters for callers that use the raw context type.
        return (Collection)ctxMap.values();
    }
    /**
     * @return Cache processor.
     */
    public GridCacheProcessor cache() {
        return kernalCtx.cache();
    }
    /**
     * Adds cache context to shared cache context.
     *
     * @param cacheCtx Cache context to add.
     * @throws IgniteCheckedException If cache ID conflict detected.
     */
    @SuppressWarnings("unchecked")
    public void addCacheContext(GridCacheContext cacheCtx) throws IgniteCheckedException {
        if (ctxMap.containsKey(cacheCtx.cacheId())) {
            GridCacheContext<K, V> existing = ctxMap.get(cacheCtx.cacheId());
            throw new IgniteCheckedException("Failed to start cache due to conflicting cache ID " +
                "(change cache name and restart grid) [cacheName=" + cacheCtx.name() +
                ", conflictingCacheName=" + existing.name() + ']');
        }
        CacheStoreManager mgr = cacheCtx.store();
        // Track how many caches have a configured local store.
        if (mgr.configured() && mgr.isLocal())
            locStoreCnt.incrementAndGet();
        ctxMap.put(cacheCtx.cacheId(), cacheCtx);
    }
    /**
     * @param cacheCtx Cache context to remove.
     */
    public void removeCacheContext(GridCacheContext cacheCtx) {
        int cacheId = cacheCtx.cacheId();
        ctxMap.remove(cacheId, cacheCtx);
        CacheStoreManager mgr = cacheCtx.store();
        if (mgr.configured() && mgr.isLocal())
            locStoreCnt.decrementAndGet();
        // Safely clean up the message listeners.
        ioMgr.removeHandlers(cacheId);
    }
    /**
     * Checks if cache context is closed.
     *
     * @param ctx Cache context to check.
     * @return {@code True} if cache context is closed.
     */
    public boolean closed(GridCacheContext ctx) {
        return !ctxMap.containsKey(ctx.cacheId());
    }
    /**
     * @return List of shared context managers in starting order.
     */
    public List<GridCacheSharedManager<K, V>> managers() {
        return mgrs;
    }
    /**
     * Gets cache context by cache ID.
     *
     * @param cacheId Cache ID.
     * @return Cache context.
     */
    public GridCacheContext<K, V> cacheContext(int cacheId) {
        return ctxMap.get(cacheId);
    }
    /**
     * @return Grid name.
     */
    public String gridName() {
        return kernalCtx.gridName();
    }
    /**
     * Gets transactions configuration.
     *
     * @return Transactions configuration.
     */
    public TransactionConfiguration txConfig() {
        return kernalCtx.config().getTransactionConfiguration();
    }
    /**
     * @return Timeout for initial map exchange before preloading. We make it {@code 4} times
     * bigger than network timeout by default.
     */
    public long preloadExchangeTimeout() {
        long t1 = gridConfig().getNetworkTimeout() * 4;
        long t2 = gridConfig().getNetworkTimeout() * gridConfig().getCacheConfiguration().length * 2;
        long timeout = Math.max(t1, t2);
        // Negative value means the multiplication overflowed — treat as "no timeout".
        return timeout < 0 ? Long.MAX_VALUE : timeout;
    }
    /**
     * @return Deployment enabled flag.
     */
    public boolean deploymentEnabled() {
        return kernalContext().deploy().enabled();
    }
    /**
     * @return Data center ID.
     */
    public byte dataCenterId() {
        // Data center ID is same for all caches, so grab the first one.
        // NOTE(review): assumes at least one cache context is registered — verify callers.
        GridCacheContext<?, ?> cacheCtx = F.first(cacheContexts());
        return cacheCtx.dataCenterId();
    }
    /**
     * @return Transactional metrics adapter.
     */
    public TransactionMetricsAdapter txMetrics() {
        return txMetrics;
    }
    /**
     * Resets tx metrics by replacing the adapter instance.
     */
    public void resetTxMetrics() {
        txMetrics = new TransactionMetricsAdapter();
    }
    /**
     * @return Cache transaction manager.
     */
    public IgniteTxManager tm() {
        return txMgr;
    }
    /**
     * @return JTA manager.
     */
    public CacheJtaManagerAdapter jta() {
        return jtaMgr;
    }
    /**
     * @return Exchange manager.
     */
    public GridCachePartitionExchangeManager<K, V> exchange() {
        return exchMgr;
    }
    /**
     * @return Affinity manager.
     */
    public CacheAffinitySharedManager<K, V> affinity() {
        return affMgr;
    }
    /**
     * @return Lock order manager.
     */
    public GridCacheVersionManager versions() {
        return verMgr;
    }
    /**
     * @return Lock manager.
     */
    public GridCacheMvccManager mvcc() {
        return mvccMgr;
    }
    /**
     * @return IO manager.
     */
    public GridCacheIoManager io() {
        return ioMgr;
    }
    /**
     * @return Cache deployment manager.
     */
    public GridCacheDeploymentManager<K, V> deploy() {
        return depMgr;
    }
    /**
     * @return Marshaller.
     */
    public Marshaller marshaller() {
        return kernalCtx.config().getMarshaller();
    }
    /**
     * @return Grid configuration.
     */
    public IgniteConfiguration gridConfig() {
        return kernalCtx.config();
    }
    /**
     * @return Kernal context.
     */
    public GridKernalContext kernalContext() {
        return kernalCtx;
    }
    /**
     * @return Grid IO manager.
     */
    public GridIoManager gridIO() {
        return kernalCtx.io();
    }
    /**
     * @return Grid deployment manager.
     */
    public GridDeploymentManager gridDeploy() {
        return kernalCtx.deploy();
    }
    /**
     * @return Grid event storage manager.
     */
    public GridEventStorageManager gridEvents() {
        return kernalCtx.event();
    }
    /**
     * @return Discovery manager.
     */
    public GridDiscoveryManager discovery() {
        return kernalCtx.discovery();
    }
    /**
     * @return Timeout processor.
     */
    public GridTimeoutProcessor time() {
        return kernalCtx.timeout();
    }
    /**
     * @return Node ID.
     */
    public UUID localNodeId() {
        return kernalCtx.localNodeId();
    }
    /**
     * @return Local node.
     */
    public ClusterNode localNode() {
        return kernalCtx.discovery().localNode();
    }
    /**
     * @return Count of caches with configured local stores.
     */
    public int getLocalStoreCount() {
        return locStoreCnt.get();
    }
    /**
     * @param nodeId Node ID.
     * @return Node or {@code null}.
     */
    @Nullable public ClusterNode node(UUID nodeId) {
        return kernalCtx.discovery().node(nodeId);
    }
    /** @return Whether local store keeps primary only. */
    public boolean localStorePrimaryOnly() {
        return locStorePrimaryOnly;
    }
    /**
     * Gets grid logger for given class.
     *
     * @param cls Class to get logger for.
     * @return IgniteLogger instance.
     */
    public IgniteLogger logger(Class<?> cls) {
        return kernalCtx.log(cls);
    }
    /**
     * @param category Category.
     * @return Logger.
     */
    public IgniteLogger logger(String category) {
        return kernalCtx.log(category);
    }
    /**
     * Waits for partition locks and transactions release.
     *
     * @param topVer Topology version.
     * @return {@code true} if waiting was successful.
     */
    @SuppressWarnings({"unchecked"})
    public IgniteInternalFuture<?> partitionReleaseFuture(AffinityTopologyVersion topVer) {
        GridCompoundFuture f = new GridCompoundFuture();
        // Completes when explicit locks, transactions and atomic updates up to topVer are done.
        f.add(mvcc().finishExplicitLocks(topVer));
        f.add(tm().finishTxs(topVer));
        f.add(mvcc().finishAtomicUpdates(topVer));
        f.markInitialized();
        return f;
    }
    /**
     * Gets ready future for the next affinity topology version (used in cases when a node leaves grid).
     *
     * @param curVer Current topology version (before a node left grid).
     * @return Ready future, or {@code null} when {@code curVer} is {@code null}.
     */
    public IgniteInternalFuture<?> nextAffinityReadyFuture(AffinityTopologyVersion curVer) {
        if (curVer == null)
            return null;
        AffinityTopologyVersion nextVer = new AffinityTopologyVersion(curVer.topologyVersion() + 1);
        IgniteInternalFuture<?> fut = exchMgr.affinityReadyFuture(nextVer);
        return fut == null ? new GridFinishedFuture<>() : fut;
    }
    /**
     * Verifies that {@code cacheCtx} may be enlisted into {@code tx} together with the
     * already-active caches (system flag, store locality, write-behind and deployment
     * settings must all be compatible).
     *
     * @param tx Transaction to check.
     * @param activeCacheIds Active cache IDs.
     * @param cacheCtx Cache context.
     * @return Error message if transactions are incompatible.
     */
    @Nullable public String verifyTxCompatibility(IgniteInternalTx tx, GridLongList activeCacheIds,
        GridCacheContext<K, V> cacheCtx) {
        if (cacheCtx.systemTx() && !tx.system())
            return "system cache can be enlisted only in system transaction";
        if (!cacheCtx.systemTx() && tx.system())
            return "non-system cache can't be enlisted in system transaction";
        for (int i = 0; i < activeCacheIds.size(); i++) {
            int cacheId = (int)activeCacheIds.get(i);
            GridCacheContext<K, V> activeCacheCtx = cacheContext(cacheId);
            if (cacheCtx.systemTx()) {
                if (activeCacheCtx.cacheId() != cacheCtx.cacheId())
                    return "system transaction can include only one cache";
            }
            CacheStoreManager store = cacheCtx.store();
            CacheStoreManager activeStore = activeCacheCtx.store();
            if (store.isLocal() != activeStore.isLocal())
                return "caches with local and non-local stores can't be enlisted in one transaction";
            if (store.isWriteBehind() != activeStore.isWriteBehind())
                return "caches with different write-behind setting can't be enlisted in one transaction";
            if (activeCacheCtx.deploymentEnabled() != cacheCtx.deploymentEnabled())
                return "caches with enabled and disabled deployment modes can't be enlisted in one transaction";
            // If local and write-behind validations passed, this must be true.
            assert store.isWriteToStoreFromDht() == activeStore.isWriteToStoreFromDht();
        }
        return null;
    }
    /**
     * @param ignore Transaction to ignore.
     * @return Not null topology version if current thread holds lock preventing topology change.
     */
    @Nullable public AffinityTopologyVersion lockedTopologyVersion(IgniteInternalTx ignore) {
        long threadId = Thread.currentThread().getId();
        // Check transaction-held locks first, then explicit locks.
        AffinityTopologyVersion topVer = txMgr.lockedTopologyVersion(threadId, ignore);
        if (topVer == null)
            topVer = mvccMgr.lastExplicitLockTopologyVersion(threadId);
        return topVer;
    }
    /**
     * Nulling references to potentially leak-prone objects.
     */
    public void cleanup() {
        mvccMgr = null;
        mgrs.clear();
    }
    /**
     * Waits for the transaction's last future and closes it.
     *
     * @param tx Transaction to close.
     * @throws IgniteCheckedException If failed.
     */
    public void endTx(IgniteInternalTx tx) throws IgniteCheckedException {
        tx.txState().awaitLastFut(this);
        tx.close();
    }
    /**
     * @param tx Transaction to commit.
     * @return Commit future.
     */
    @SuppressWarnings("unchecked")
    public IgniteInternalFuture<IgniteInternalTx> commitTxAsync(IgniteInternalTx tx) {
        // Single-cache transactions are delegated to the cache for commit.
        GridCacheContext ctx = tx.txState().singleCacheContext(this);
        if (ctx == null) {
            tx.txState().awaitLastFut(this);
            return tx.commitAsync();
        }
        else
            return ctx.cache().commitTxAsync(tx);
    }
    /**
     * @param tx Transaction to rollback.
     * @throws IgniteCheckedException If failed.
     * @return Rollback future.
     */
    public IgniteInternalFuture rollbackTxAsync(IgniteInternalTx tx) throws IgniteCheckedException {
        tx.txState().awaitLastFut(this);
        return tx.rollbackAsync();
    }
    /**
     * @return Store session listeners.
     */
    @Nullable public Collection<CacheStoreSessionListener> storeSessionListeners() {
        return storeSesLsnrs;
    }
    /**
     * Registers a manager in the list unless it is {@code null}.
     *
     * @param mgrs Managers list.
     * @param mgr Manager to add.
     * @return Added manager.
     */
    @Nullable private <T extends GridCacheSharedManager<K, V>> T add(List<GridCacheSharedManager<K, V>> mgrs,
        @Nullable T mgr) {
        if (mgr != null)
            mgrs.add(mgr);
        return mgr;
    }
    /**
     * Reset thread-local context for transactional cache.
     */
    public void txContextReset() {
        mvccMgr.contextReset();
    }
}
| |
/*
* Copyright 2006 Sascha Weinreuter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.intellij.plugins.intelliLang.inject.config;
import com.intellij.lang.Language;
import com.intellij.openapi.components.PersistentStateComponent;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.patterns.StringPattern;
import com.intellij.patterns.compiler.PatternCompiler;
import com.intellij.patterns.compiler.PatternCompilerFactory;
import com.intellij.psi.ElementManipulators;
import com.intellij.psi.LiteralTextEscaper;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiLanguageInjectionHost;
import com.intellij.util.ArrayUtil;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import org.intellij.lang.annotations.RegExp;
import org.intellij.plugins.intelliLang.inject.InjectedLanguage;
import org.intellij.plugins.intelliLang.inject.InjectorUtils;
import org.jdom.CDATA;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Injection base class: Contains properties for language-id, prefix and suffix,
 * plus the set of {@link InjectionPlace}s where the injection applies and an
 * optional value-pattern restricting it to matching sub-ranges.
 */
public class BaseInjection implements Injection, PersistentStateComponent<Element> {
  public static final Key<BaseInjection> INJECTION_KEY = Key.create("INJECTION_KEY");
  /** Identifier of the language-support implementation this injection belongs to. */
  @NotNull private final String mySupportId;
  private String myDisplayName = "";
  private String myInjectedLanguageId = "";
  private String myPrefix = "";
  private String mySuffix = "";
  /** Regex source restricting injection to matching value ranges; empty means the whole value. */
  @NonNls
  private String myValuePattern = "";
  /** Compiled form of {@link #myValuePattern}; null when no pattern is set or it failed to compile. */
  private Pattern myCompiledValuePattern;
  private boolean mySingleFile;
  public BaseInjection(@NotNull final String id) {
    mySupportId = id;
  }
  public BaseInjection(@NotNull String supportId, @NotNull String injectedLanguageId, @NotNull String prefix, @NotNull String suffix, @NotNull InjectionPlace... places) {
    mySupportId = supportId;
    myInjectedLanguageId = injectedLanguageId;
    myPrefix = prefix;
    mySuffix = suffix;
    myPlaces = places;
  }
  /** @return the {@link Language} registered under the injected language id, or null if unknown. */
  @Nullable
  public Language getInjectedLanguage() {
    return InjectedLanguage.findLanguageById(myInjectedLanguageId);
  }
  /** Places (element patterns with an enabled flag) where this injection applies. */
  @NotNull
  private InjectionPlace[] myPlaces = InjectionPlace.EMPTY_ARRAY;
  @NotNull
  public InjectionPlace[] getInjectionPlaces() {
    return myPlaces;
  }
  public void setInjectionPlaces(@NotNull InjectionPlace... places) {
    myPlaces = places;
  }
  @NotNull
  public String getSupportId() {
    return mySupportId;
  }
  @NotNull
  public String getInjectedLanguageId() {
    return myInjectedLanguageId;
  }
  @NotNull
  public String getDisplayName() {
    return myDisplayName;
  }
  public void setDisplayName(@NotNull String displayName) {
    myDisplayName = displayName;
  }
  public void setInjectedLanguageId(@NotNull String injectedLanguageId) {
    myInjectedLanguageId = injectedLanguageId;
  }
  @NotNull
  public String getPrefix() {
    return myPrefix;
  }
  public void setPrefix(@NotNull String prefix) {
    myPrefix = prefix;
  }
  @NotNull
  public String getSuffix() {
    return mySuffix;
  }
  public void setSuffix(@NotNull String suffix) {
    mySuffix = suffix;
  }
  /**
   * Computes the text ranges inside {@code element}'s value that should be injected:
   * the whole value range when no value-pattern is set, otherwise the ranges matched
   * by the pattern, mapped from decoded text back to host-element offsets.
   */
  @NotNull
  public List<TextRange> getInjectedArea(final PsiElement element) {
    final TextRange textRange = ElementManipulators.getValueTextRange(element);
    if (myCompiledValuePattern == null) {
      return Collections.singletonList(textRange);
    }
    else {
      // Decode escape sequences so the pattern matches the logical (unescaped) text.
      final LiteralTextEscaper<? extends PsiLanguageInjectionHost> textEscaper =
        ((PsiLanguageInjectionHost)element).createLiteralTextEscaper();
      final StringBuilder sb = new StringBuilder();
      textEscaper.decode(textRange, sb);
      final List<TextRange> ranges = getMatchingRanges(myCompiledValuePattern.matcher(StringPattern.newBombedCharSequence(sb)), sb.length());
      return !ranges.isEmpty() ? ContainerUtil.map(ranges, s -> new TextRange(textEscaper.getOffsetInHost(s.getStartOffset(), textRange), textEscaper.getOffsetInHost(s.getEndOffset(), textRange))) : Collections.emptyList();
    }
  }
  /** @return true if at least one place with a compiled pattern is enabled. */
  public boolean isEnabled() {
    for (InjectionPlace place : myPlaces) {
      if (place.getElementPattern() != null && place.isEnabled()) return true;
    }
    return false;
  }
  /** @return true if any enabled place's pattern accepts {@code element}. */
  public boolean acceptsPsiElement(final PsiElement element) {
    ProgressManager.checkCanceled();
    for (InjectionPlace place : myPlaces) {
      if (place.isEnabled() && place.getElementPattern() != null && place.getElementPattern().accepts(element)) {
        return true;
      }
    }
    return false;
  }
  /** @return true if this injection shares the language and at least one place with {@code template}. */
  public boolean intersectsWith(final BaseInjection template) {
    if (!Comparing.equal(getInjectedLanguageId(), template.getInjectedLanguageId())) return false;
    for (InjectionPlace other : template.getInjectionPlaces()) {
      if (ArrayUtil.contains(other, myPlaces)) return true;
    }
    return false;
  }
  /** @return true if language id, prefix, suffix, value-pattern and single-file flag all match. */
  public boolean sameLanguageParameters(final BaseInjection that) {
    if (!myInjectedLanguageId.equals(that.myInjectedLanguageId)) return false;
    if (!myPrefix.equals(that.myPrefix)) return false;
    if (!mySuffix.equals(that.mySuffix)) return false;
    if (!myValuePattern.equals(that.myValuePattern)) return false;
    if (mySingleFile != that.mySingleFile) return false;
    return true;
  }
  @SuppressWarnings({"unchecked"})
  public BaseInjection copy() {
    return new BaseInjection(mySupportId).copyFrom(this);
  }
  @SuppressWarnings({"RedundantIfStatement"})
  public boolean equals(Object o) {
    if (this == o) return true;
    if (!(o instanceof BaseInjection)) return false;
    final BaseInjection that = (BaseInjection)o;
    if (!Comparing.equal(getDisplayName(), that.getDisplayName())) return false;
    if (!sameLanguageParameters(that)) return false;
    if (myPlaces.length != that.myPlaces.length) return false;
    for (int i = 0, len = myPlaces.length; i < len; i++) {
      if (myPlaces[i].isEnabled() != that.myPlaces[i].isEnabled()) {
        return false;
      }
    }
    // The enabled flag is compared explicitly above; InjectionPlace.equals() presumably
    // ignores it — verify against InjectionPlace before relying on this.
    if (!Arrays.equals(myPlaces, that.myPlaces)) return false;
    return true;
  }
  // Intentionally uses only the language parameters: objects equal per equals() necessarily
  // share them, so the equals/hashCode contract holds even though fewer fields participate.
  public int hashCode() {
    int result;
    result = myInjectedLanguageId.hashCode();
    result = 31 * result + myPrefix.hashCode();
    result = 31 * result + mySuffix.hashCode();
    result = 31 * result + myValuePattern.hashCode();
    return result;
  }
  /** Copies all properties from {@code other} into this instance (places are cloned). */
  public BaseInjection copyFrom(@NotNull BaseInjection other) {
    assert this != other;
    myInjectedLanguageId = other.getInjectedLanguageId();
    myPrefix = other.getPrefix();
    mySuffix = other.getSuffix();
    myDisplayName = other.getDisplayName();
    setValuePattern(other.getValuePattern());
    mySingleFile = other.mySingleFile;
    myPlaces = other.getInjectionPlaces().clone();
    return this;
  }
  /** Restores this injection from the persisted XML element (inverse of {@link #getState()}). */
  public void loadState(@NotNull Element element) {
    final PatternCompiler<PsiElement> helper = getCompiler();
    myDisplayName = StringUtil.notNullize(element.getChildText("display-name"));
    myInjectedLanguageId = StringUtil.notNullize(element.getAttributeValue("language"));
    myPrefix = StringUtil.notNullize(element.getChildText("prefix"));
    mySuffix = StringUtil.notNullize(element.getChildText("suffix"));
    setValuePattern(element.getChildText("value-pattern"));
    mySingleFile = element.getChild("single-file") != null;
    readExternalImpl(element);
    final List<Element> placeElements = element.getChildren("place");
    myPlaces = InjectionPlace.ARRAY_FACTORY.create(placeElements.size());
    for (int i = 0, placeElementsSize = placeElements.size(); i < placeElementsSize; i++) {
      Element placeElement = placeElements.get(i);
      final boolean enabled = !Boolean.parseBoolean(placeElement.getAttributeValue("disabled"));
      final String text = placeElement.getText();
      myPlaces[i] = new InjectionPlace(helper.createElementPattern(text, getDisplayName()), enabled);
    }
    // Let subclasses provide default places when none were persisted.
    if (myPlaces.length == 0) {
      generatePlaces();
    }
  }
  public PatternCompiler<PsiElement> getCompiler() {
    return PatternCompilerFactory.getFactory().getPatternCompiler(InjectorUtils.getPatternClasses(getSupportId()));
  }
  /** Hook for subclasses to generate default injection places; no-op by default. */
  public void generatePlaces() {
  }
  protected void readExternalImpl(Element e) {}
  /** Serializes this injection to an XML element (inverse of {@link #loadState(Element)}). */
  public final Element getState() {
    final Element e = new Element("injection");
    e.setAttribute("language", myInjectedLanguageId);
    e.setAttribute("injector-id", mySupportId);
    e.addContent(new Element("display-name").setText(getDisplayName()));
    if (StringUtil.isNotEmpty(myPrefix)) {
      e.addContent(new Element("prefix").setText(myPrefix));
    }
    if (StringUtil.isNotEmpty(mySuffix)) {
      e.addContent(new Element("suffix").setText(mySuffix));
    }
    if (StringUtil.isNotEmpty(myValuePattern)) {
      e.addContent(new Element("value-pattern").setText(myValuePattern));
    }
    if (mySingleFile) {
      e.addContent(new Element("single-file"));
    }
    // Sort places by pattern text so the persisted form is stable across runs.
    Arrays.sort(myPlaces, (o1, o2) -> Comparing.compare(o1.getText(), o2.getText()));
    for (InjectionPlace place : myPlaces) {
      final Element child = new Element("place").setContent(new CDATA(place.getText()));
      if (!place.isEnabled()) child.setAttribute("disabled", "true");
      e.addContent(child);
    }
    writeExternalImpl(e);
    return e;
  }
  protected void writeExternalImpl(Element e) {}
  @NotNull
  public String getValuePattern() {
    return myValuePattern;
  }
  /**
   * Sets and compiles the value pattern. On a bad pattern the compiled form is cleared
   * (injection falls back to the whole value) and the error is logged, not rethrown.
   */
  public void setValuePattern(@RegExp @Nullable String pattern) {
    try {
      if (pattern != null && pattern.length() > 0) {
        myValuePattern = pattern;
        myCompiledValuePattern = Pattern.compile(pattern, Pattern.DOTALL);
      }
      else {
        myValuePattern = "";
        myCompiledValuePattern = null;
      }
    }
    catch (Exception e1) {
      myCompiledValuePattern = null;
      Logger.getInstance(getClass().getName()).info("Invalid pattern", e1);
    }
  }
  public boolean isSingleFile() {
    return mySingleFile;
  }
  public void setSingleFile(final boolean singleFile) {
    mySingleFile = singleFile;
  }
  /**
   * Determines if further injections should be examined if {@code isApplicable} has returned true.
   * <p/>
   * This is determined by the presence of a value-pattern: If none is present, the entry is considered
   * to be a terminal one.
   *
   * @return true to stop, false to continue
   */
  public boolean isTerminal() {
    return myCompiledValuePattern == null;
  }
  /**
   * Collects the ranges matched by {@code matcher}: capturing-group ranges when groups
   * exist, otherwise whole-match ranges. Unmatched groups (start == -1) are skipped.
   */
  private static List<TextRange> getMatchingRanges(Matcher matcher, final int length) {
    final List<TextRange> list = new SmartList<>();
    int start = 0;
    while (start < length && matcher.find(start)) {
      final int groupCount = matcher.groupCount();
      if (groupCount != 0) {
        for (int i = 1; i <= groupCount; i++) {
          start = matcher.start(i);
          if (start == -1) continue;
          list.add(new TextRange(start, matcher.end(i)));
        }
        // Guard against groups that end at or past the overall match end.
        if (start >= matcher.end()) break;
      }
      start = matcher.end();
    }
    return list;
  }
  /** Appends places from {@code injection} that are not already present, optionally disabling them. */
  public void mergeOriginalPlacesFrom(final BaseInjection injection, final boolean enabled) {
    for (InjectionPlace place : injection.getInjectionPlaces()) {
      if (!ArrayUtil.contains(place, myPlaces)) {
        myPlaces = ArrayUtil.append(myPlaces, enabled || !place.isEnabled() ? place : place.enabled(false), InjectionPlace.ARRAY_FACTORY);
      }
    }
  }
  /** Toggles the enabled flag of the place with matching {@code text}, or of all places when text is null. */
  public void setPlaceEnabled(@Nullable final String text, final boolean enabled) {
    for (int i = 0; i < myPlaces.length; i++) {
      final InjectionPlace cur = myPlaces[i];
      if (text == null || Comparing.equal(text, cur.getText())) {
        if (cur.isEnabled() != enabled) {
          myPlaces[i] = cur.enabled(enabled);
        }
      }
    }
  }
  public boolean acceptForReference(PsiElement element) {
    return acceptsPsiElement(element);
  }
  @Override
  public String toString() {
    return getInjectedLanguageId()+ "->" +getDisplayName();
  }
}
| |
/*
*
* Copyright 2015 Robert Winkler
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package io.github.robwin.swagger2markup.builder.document;
import com.google.common.collect.ImmutableMap;
import io.github.robwin.markup.builder.MarkupDocBuilder;
import io.github.robwin.markup.builder.MarkupDocBuilders;
import io.swagger.models.ComposedModel;
import io.swagger.models.Model;
import io.swagger.models.RefModel;
import io.swagger.models.Swagger;
import io.swagger.models.properties.Property;
import io.github.robwin.markup.builder.MarkupLanguage;
import io.github.robwin.swagger2markup.utils.PropertyUtils;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.*;
/**
 * Builds the "Definitions" chapter of the generated markup documentation from
 * the definitions of a Swagger specification. Optionally inlines JSON/XML
 * schema files, prefers hand-written description files over the descriptions
 * in the Swagger source, and can additionally write one separate markup file
 * per definition.
 *
 * @author Robert Winkler
 */
public class DefinitionsDocument extends MarkupDocument {

    private static final String DEFINITIONS = "Definitions";
    // Definition names that must never be rendered (synthetic types).
    private static final List<String> IGNORED_DEFINITIONS = Collections.singletonList("Void");
    private static final String JSON_SCHEMA = "JSON Schema";
    private static final String XML_SCHEMA = "XML Schema";
    private static final String JSON_SCHEMA_EXTENSION = ".json";
    private static final String XML_SCHEMA_EXTENSION = ".xsd";
    private static final String JSON = "json";
    private static final String XML = "xml";
    private static final String DESCRIPTION_FILE_NAME = "description";

    // True when a schemas folder was configured in the constructor.
    private boolean schemasEnabled;
    private String schemasFolderPath;
    // True when a descriptions folder was configured in the constructor.
    private boolean handWrittenDescriptionsEnabled;
    private String descriptionsFolderPath;
    private boolean separatedDefinitionsEnabled;
    private String outputDirectory;

    /**
     * Creates the definitions document builder.
     *
     * @param swagger the Swagger source
     * @param markupLanguage the markup language to generate
     * @param schemasFolderPath folder containing schema files; blank disables schema inclusion
     * @param descriptionsFolderPath folder containing hand-written descriptions; blank disables them
     * @param separatedDefinitionsEnabled whether to additionally write one file per definition
     * @param outputDirectory target directory; required when separated definition files are enabled
     */
    public DefinitionsDocument(Swagger swagger, MarkupLanguage markupLanguage, String schemasFolderPath, String descriptionsFolderPath, boolean separatedDefinitionsEnabled, String outputDirectory){
        super(swagger, markupLanguage);
        if(StringUtils.isNotBlank(schemasFolderPath)){
            this.schemasEnabled = true;
            this.schemasFolderPath = schemasFolderPath;
        }
        if(StringUtils.isNotBlank(descriptionsFolderPath)){
            this.handWrittenDescriptionsEnabled = true;
            // Hand-written definition descriptions live in a "definitions" subfolder.
            this.descriptionsFolderPath = descriptionsFolderPath + "/" + DEFINITIONS.toLowerCase();
        }
        if (logger.isDebugEnabled()) {
            logger.debug(schemasEnabled
                    ? "Include schemas is enabled."
                    : "Include schemas is disabled.");
            logger.debug(handWrittenDescriptionsEnabled
                    ? "Include hand-written descriptions is enabled."
                    : "Include hand-written descriptions is disabled.");
        }
        this.separatedDefinitionsEnabled = separatedDefinitionsEnabled;
        if(this.separatedDefinitionsEnabled){
            if (logger.isDebugEnabled()) {
                logger.debug("Create separated definition files is enabled.");
            }
            Validate.notEmpty(outputDirectory, "Output directory is required for separated definition files!");
        }else{
            if (logger.isDebugEnabled()) {
                logger.debug("Create separated definition files is disabled.");
            }
        }
        this.outputDirectory = outputDirectory;
    }

    /**
     * Builds the document from the Swagger definitions.
     *
     * @return this document, for chaining
     */
    @Override
    public MarkupDocument build() throws IOException {
        definitions(swagger.getDefinitions(), this.markupDocBuilder);
        return this;
    }

    /**
     * Builds the Swagger definitions.
     *
     * @param definitions the Swagger definitions
     * @param docBuilder the doc builder to use for output
     */
    private void definitions(Map<String, Model> definitions, MarkupDocBuilder docBuilder) throws IOException {
        if(MapUtils.isNotEmpty(definitions)){
            docBuilder.sectionTitleLevel1(DEFINITIONS);
            for(Map.Entry<String, Model> definitionsEntry : definitions.entrySet()){
                String definitionName = definitionsEntry.getKey();
                if(StringUtils.isNotBlank(definitionName)) {
                    if (checkThatDefinitionIsNotInIgnoreList(definitionName)) {
                        definition(definitions, definitionName, definitionsEntry.getValue(), docBuilder);
                        definitionSchema(definitionName, docBuilder);
                        if (separatedDefinitionsEnabled) {
                            // Render the same definition again into its own file.
                            MarkupDocBuilder defDocBuilder = MarkupDocBuilders.documentBuilder(markupLanguage);
                            definition(definitions, definitionName, definitionsEntry.getValue(), defDocBuilder);
                            definitionSchema(definitionName, defDocBuilder);
                            defDocBuilder.writeToFile(outputDirectory, definitionName.toLowerCase(), StandardCharsets.UTF_8);
                            if (logger.isInfoEnabled()) {
                                logger.info("Separate definition file produced: {}", definitionName);
                            }
                        }
                        if (logger.isInfoEnabled()) {
                            logger.info("Definition processed: {}", definitionName);
                        }
                    }else{
                        if (logger.isDebugEnabled()) {
                            logger.debug("Definition was ignored: {}", definitionName);
                        }
                    }
                }
            }
        }
    }

    /**
     * Checks that the definition is not in the list of ignored definitions.
     *
     * @param definitionName the name of the definition
     * @return true if the definition can be processed
     */
    private boolean checkThatDefinitionIsNotInIgnoreList(String definitionName) {
        return !IGNORED_DEFINITIONS.contains(definitionName);
    }

    /**
     * Builds a concrete definition: title, description and properties table.
     *
     * @param definitions all Swagger definitions (needed to resolve references)
     * @param definitionName the name of the definition
     * @param model the Swagger Model of the definition
     * @param docBuilder the doc builder to use for output
     */
    private void definition(Map<String, Model> definitions, String definitionName, Model model, MarkupDocBuilder docBuilder) throws IOException {
        docBuilder.sectionTitleLevel2(definitionName);
        descriptionSection(definitionName, model, docBuilder);
        propertiesSection(definitions, definitionName, model, docBuilder);
    }

    /**
     * Renders the properties of a definition as a table. Nothing is emitted
     * when the model has no properties.
     */
    private void propertiesSection(Map<String, Model> definitions, String definitionName, Model model, MarkupDocBuilder docBuilder) throws IOException {
        Map<String, Property> properties = getAllProperties(definitions, model);
        List<String> headerAndContent = new ArrayList<>();
        List<String> header = Arrays.asList(NAME_COLUMN, DESCRIPTION_COLUMN, REQUIRED_COLUMN, SCHEMA_COLUMN, DEFAULT_COLUMN);
        headerAndContent.add(StringUtils.join(header, DELIMITER));
        if(MapUtils.isNotEmpty(properties)){
            for (Map.Entry<String, Property> propertyEntry : properties.entrySet()) {
                Property property = propertyEntry.getValue();
                String propertyName = propertyEntry.getKey();
                List<String> content = Arrays.asList(
                        propertyName,
                        propertyDescription(definitionName, propertyName, property),
                        Boolean.toString(property.getRequired()),
                        PropertyUtils.getType(property, markupLanguage),
                        PropertyUtils.getDefaultValue(property));
                headerAndContent.add(StringUtils.join(content, DELIMITER));
            }
            docBuilder.tableWithHeaderRow(headerAndContent);
        }
    }

    /**
     * Resolves the full property map of a model: follows {@code $ref} models
     * and merges the properties of all parts of a composed ("allOf") model.
     *
     * @return the resolved properties, or {@code null} for an unresolvable reference
     */
    private Map<String, Property> getAllProperties(Map<String, Model> definitions, Model model) {
        if(model instanceof RefModel) {
            final String ref = model.getReference();
            // Reuse the computed reference instead of calling getReference() twice.
            return definitions.containsKey(ref)
                    ? getAllProperties(definitions, definitions.get(ref))
                    : null;
        }
        if(model instanceof ComposedModel) {
            ComposedModel composedModel = (ComposedModel)model;
            ImmutableMap.Builder<String, Property> allProperties = ImmutableMap.builder();
            if(composedModel.getAllOf() != null) {
                for(Model innerModel : composedModel.getAllOf()) {
                    Map<String, Property> innerProperties = getAllProperties(definitions, innerModel);
                    if(innerProperties != null) {
                        allProperties.putAll(innerProperties);
                    }
                }
            }
            return allProperties.build();
        }
        else {
            return model.getProperties();
        }
    }

    /**
     * Renders the definition description, preferring a hand-written description
     * file (when enabled and readable) over the Swagger model description.
     */
    private void descriptionSection(String definitionName, Model model, MarkupDocBuilder docBuilder) throws IOException {
        if(handWrittenDescriptionsEnabled){
            String description = handWrittenPathDescription(definitionName.toLowerCase(), DESCRIPTION_FILE_NAME);
            if(StringUtils.isNotBlank(description)){
                docBuilder.paragraph(description);
            }else{
                if (logger.isInfoEnabled()) {
                    logger.info("Hand-written description cannot be read. Trying to use description from Swagger source.");
                }
                modelDescription(model, docBuilder);
            }
        }
        else{
            modelDescription(model, docBuilder);
        }
    }

    /** Emits the Swagger model description as a paragraph, when present. */
    private void modelDescription(Model model, MarkupDocBuilder docBuilder) {
        String description = model.getDescription();
        if (StringUtils.isNotBlank(description)) {
            docBuilder.paragraph(description);
        }
    }

    /**
     * Resolves a property description, preferring a hand-written description
     * file (when enabled and readable) over the Swagger property description.
     *
     * @return the description, never {@code null} (empty string when absent)
     */
    private String propertyDescription(String definitionName, String propertyName, Property property) throws IOException {
        String description;
        if(handWrittenDescriptionsEnabled){
            description = handWrittenPathDescription(definitionName.toLowerCase() + "/" + propertyName.toLowerCase(), DESCRIPTION_FILE_NAME);
            if(StringUtils.isBlank(description)) {
                if (logger.isInfoEnabled()) {
                    logger.info("Hand-written description file cannot be read. Trying to use description from Swagger source.");
                }
                description = StringUtils.defaultString(property.getDescription());
            }
        }
        else{
            description = StringUtils.defaultString(property.getDescription());
        }
        return description;
    }

    /**
     * Reads a hand-written description, trying every file name extension of the
     * configured markup language.
     *
     * @param descriptionFolder the name of the folder where the description file resides
     * @param descriptionFileName the name of the description file
     * @return the trimmed content of the file, or {@code null} when no readable file was found
     * @throws IOException if the file cannot be read
     */
    private String handWrittenPathDescription(String descriptionFolder, String descriptionFileName) throws IOException {
        for (String fileNameExtension : markupLanguage.getFileNameExtensions()) {
            java.nio.file.Path path = Paths.get(descriptionsFolderPath, descriptionFolder, descriptionFileName + fileNameExtension);
            if (Files.isReadable(path)) {
                if (logger.isInfoEnabled()) {
                    logger.info("Description file processed: {}", path);
                }
                return FileUtils.readFileToString(path.toFile(), StandardCharsets.UTF_8).trim();
            } else {
                if (logger.isDebugEnabled()) {
                    logger.debug("Description file is not readable: {}", path);
                }
            }
        }
        if (logger.isWarnEnabled()) {
            // Fixed: was logger.info inside an isWarnEnabled() guard — level mismatch.
            logger.warn("No description file found with correct file name extension in folder: {}", Paths.get(descriptionsFolderPath, descriptionFolder));
        }
        return null;
    }

    /**
     * Inlines the JSON and XML schema files of a definition, when schema
     * inclusion is enabled and the files exist.
     */
    private void definitionSchema(String definitionName, MarkupDocBuilder docBuilder) throws IOException {
        if(schemasEnabled) {
            if (StringUtils.isNotBlank(definitionName)) {
                schema(JSON_SCHEMA, schemasFolderPath, definitionName + JSON_SCHEMA_EXTENSION, JSON, docBuilder);
                schema(XML_SCHEMA, schemasFolderPath, definitionName + XML_SCHEMA_EXTENSION, XML, docBuilder);
            }
        }
    }

    /**
     * Inlines a single schema file as a titled source block. Silently skipped
     * (with a debug log) when the file is not readable.
     *
     * @param title the section title to emit
     * @param schemasFolderPath the folder containing the schema file
     * @param schemaName the schema file name
     * @param language the source-highlighting language of the schema
     * @param docBuilder the doc builder to use for output
     */
    private void schema(String title, String schemasFolderPath, String schemaName, String language, MarkupDocBuilder docBuilder) throws IOException {
        java.nio.file.Path path = Paths.get(schemasFolderPath, schemaName);
        if (Files.isReadable(path)) {
            docBuilder.sectionTitleLevel3(title);
            docBuilder.source(FileUtils.readFileToString(path.toFile(), StandardCharsets.UTF_8).trim(), language);
            if (logger.isInfoEnabled()) {
                logger.info("Schema file processed: {}", path);
            }
        } else {
            if (logger.isDebugEnabled()) {
                logger.debug("Schema file is not readable: {}", path);
            }
        }
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.worklink.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Request to update the company network configuration — VPC, subnets and
 * security groups — for an Amazon WorkLink fleet.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/worklink-2018-09-25/UpdateCompanyNetworkConfiguration"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateCompanyNetworkConfigurationRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The ARN of the fleet. */
    private String fleetArn;
    /** The VPC with connectivity to associated websites. */
    private String vpcId;
    /** The subnets used for X-ENI connections from Amazon WorkLink rendering containers. */
    private java.util.List<String> subnetIds;
    /** The security groups associated with access to the provided subnets. */
    private java.util.List<String> securityGroupIds;

    /**
     * Sets the ARN of the fleet.
     *
     * @param fleetArn
     *        The ARN of the fleet.
     */
    public void setFleetArn(String fleetArn) {
        this.fleetArn = fleetArn;
    }

    /**
     * Returns the ARN of the fleet.
     *
     * @return The ARN of the fleet.
     */
    public String getFleetArn() {
        return this.fleetArn;
    }

    /**
     * Fluent setter for the ARN of the fleet.
     *
     * @param fleetArn
     *        The ARN of the fleet.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateCompanyNetworkConfigurationRequest withFleetArn(String fleetArn) {
        setFleetArn(fleetArn);
        return this;
    }

    /**
     * Sets the VPC with connectivity to associated websites.
     *
     * @param vpcId
     *        The VPC with connectivity to associated websites.
     */
    public void setVpcId(String vpcId) {
        this.vpcId = vpcId;
    }

    /**
     * Returns the VPC with connectivity to associated websites.
     *
     * @return The VPC with connectivity to associated websites.
     */
    public String getVpcId() {
        return this.vpcId;
    }

    /**
     * Fluent setter for the VPC with connectivity to associated websites.
     *
     * @param vpcId
     *        The VPC with connectivity to associated websites.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateCompanyNetworkConfigurationRequest withVpcId(String vpcId) {
        setVpcId(vpcId);
        return this;
    }

    /**
     * Returns the subnets used for X-ENI connections from Amazon WorkLink rendering containers.
     *
     * @return The subnets used for X-ENI connections from Amazon WorkLink rendering containers.
     */
    public java.util.List<String> getSubnetIds() {
        return subnetIds;
    }

    /**
     * Sets the subnets used for X-ENI connections from Amazon WorkLink rendering containers.
     * A defensive copy of the given collection is stored; {@code null} clears the value.
     *
     * @param subnetIds
     *        The subnets used for X-ENI connections from Amazon WorkLink rendering containers.
     */
    public void setSubnetIds(java.util.Collection<String> subnetIds) {
        this.subnetIds = (subnetIds == null) ? null : new java.util.ArrayList<String>(subnetIds);
    }

    /**
     * Appends subnets used for X-ENI connections from Amazon WorkLink rendering containers.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setSubnetIds(java.util.Collection)} or {@link #withSubnetIds(java.util.Collection)} if you want to
     * override the existing values.
     * </p>
     *
     * @param subnetIds
     *        The subnets used for X-ENI connections from Amazon WorkLink rendering containers.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateCompanyNetworkConfigurationRequest withSubnetIds(String... subnetIds) {
        if (this.subnetIds == null) {
            setSubnetIds(new java.util.ArrayList<String>(subnetIds.length));
        }
        java.util.Collections.addAll(this.subnetIds, subnetIds);
        return this;
    }

    /**
     * Replaces the subnets used for X-ENI connections from Amazon WorkLink rendering containers.
     *
     * @param subnetIds
     *        The subnets used for X-ENI connections from Amazon WorkLink rendering containers.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateCompanyNetworkConfigurationRequest withSubnetIds(java.util.Collection<String> subnetIds) {
        setSubnetIds(subnetIds);
        return this;
    }

    /**
     * Returns the security groups associated with access to the provided subnets.
     *
     * @return The security groups associated with access to the provided subnets.
     */
    public java.util.List<String> getSecurityGroupIds() {
        return securityGroupIds;
    }

    /**
     * Sets the security groups associated with access to the provided subnets.
     * A defensive copy of the given collection is stored; {@code null} clears the value.
     *
     * @param securityGroupIds
     *        The security groups associated with access to the provided subnets.
     */
    public void setSecurityGroupIds(java.util.Collection<String> securityGroupIds) {
        this.securityGroupIds = (securityGroupIds == null) ? null : new java.util.ArrayList<String>(securityGroupIds);
    }

    /**
     * Appends security groups associated with access to the provided subnets.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setSecurityGroupIds(java.util.Collection)} or {@link #withSecurityGroupIds(java.util.Collection)} if you
     * want to override the existing values.
     * </p>
     *
     * @param securityGroupIds
     *        The security groups associated with access to the provided subnets.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateCompanyNetworkConfigurationRequest withSecurityGroupIds(String... securityGroupIds) {
        if (this.securityGroupIds == null) {
            setSecurityGroupIds(new java.util.ArrayList<String>(securityGroupIds.length));
        }
        java.util.Collections.addAll(this.securityGroupIds, securityGroupIds);
        return this;
    }

    /**
     * Replaces the security groups associated with access to the provided subnets.
     *
     * @param securityGroupIds
     *        The security groups associated with access to the provided subnets.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateCompanyNetworkConfigurationRequest withSecurityGroupIds(java.util.Collection<String> securityGroupIds) {
        setSecurityGroupIds(securityGroupIds);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder("{");
        if (getFleetArn() != null)
            builder.append("FleetArn: ").append(getFleetArn()).append(",");
        if (getVpcId() != null)
            builder.append("VpcId: ").append(getVpcId()).append(",");
        if (getSubnetIds() != null)
            builder.append("SubnetIds: ").append(getSubnetIds()).append(",");
        if (getSecurityGroupIds() != null)
            builder.append("SecurityGroupIds: ").append(getSecurityGroupIds());
        return builder.append("}").toString();
    }

    /** Returns true when exactly one of a/b is null, or both are non-null but unequal. */
    private static boolean fieldsDiffer(Object a, Object b) {
        return (a == null) != (b == null) || (a != null && !a.equals(b));
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof UpdateCompanyNetworkConfigurationRequest))
            return false;
        UpdateCompanyNetworkConfigurationRequest other = (UpdateCompanyNetworkConfigurationRequest) obj;
        return !fieldsDiffer(other.getFleetArn(), getFleetArn())
                && !fieldsDiffer(other.getVpcId(), getVpcId())
                && !fieldsDiffer(other.getSubnetIds(), getSubnetIds())
                && !fieldsDiffer(other.getSecurityGroupIds(), getSecurityGroupIds());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((getFleetArn() == null) ? 0 : getFleetArn().hashCode());
        result = prime * result + ((getVpcId() == null) ? 0 : getVpcId().hashCode());
        result = prime * result + ((getSubnetIds() == null) ? 0 : getSubnetIds().hashCode());
        result = prime * result + ((getSecurityGroupIds() == null) ? 0 : getSecurityGroupIds().hashCode());
        return result;
    }

    @Override
    public UpdateCompanyNetworkConfigurationRequest clone() {
        return (UpdateCompanyNetworkConfigurationRequest) super.clone();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.standard;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import java.util.regex.Pattern;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.expression.AttributeValueDecorator;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.io.StreamUtils;
import org.apache.nifi.logging.ProcessorLog;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.annotation.CapabilityDescription;
import org.apache.nifi.processor.annotation.EventDriven;
import org.apache.nifi.processor.annotation.SideEffectFree;
import org.apache.nifi.processor.annotation.SupportsBatching;
import org.apache.nifi.processor.annotation.Tags;
import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.util.IntegerHolder;
@EventDriven
@SideEffectFree
@SupportsBatching
@Tags({"route", "content", "regex", "regular expression", "regexp"})
@CapabilityDescription("Applies Regular Expressions to the content of a FlowFile and routes a copy of the FlowFile to each "
        + "destination whose Regular Expression matches. Regular Expressions are added as User-Defined Properties where the name "
        + "of the property is the name of the relationship and the value is a Regular Expression to match against the FlowFile "
        + "content. User-Defined properties do support the Attribute Expression Language, but the results are interpreted as "
        + "literal values, not Regular Expressions")
public class RouteOnContent extends AbstractProcessor {
    // Attribute stamped on each routed FlowFile with the name of the chosen relationship.
    public static final String ROUTE_ATTRIBUTE_KEY = "RouteOnContent.Route";
    public static final String MATCH_ALL = "content must match exactly";
    public static final String MATCH_SUBSEQUENCE = "content must contain match";
    // NOTE(review): the description wording "any amount of this value will be ignored"
    // looks garbled — presumably content beyond the buffer size is ignored. Confirm
    // before rewording, since this string is user-visible.
    public static final PropertyDescriptor BUFFER_SIZE = new PropertyDescriptor.Builder()
            .name("Content Buffer Size")
            .description("Specifies the maximum amount of data to buffer in order to apply the regular expressions. If the size of the FlowFile exceeds this value, any amount of this value will be ignored")
            .required(true)
            .addValidator(StandardValidators.DATA_SIZE_VALIDATOR)
            .defaultValue("1 MB")
            .build();
    public static final PropertyDescriptor MATCH_REQUIREMENT = new PropertyDescriptor.Builder()
            .name("Match Requirement")
            .description("Specifies whether the entire content of the file must match the regular expression exactly, or if any part of the file (up to Content Buffer Size) can contain the regular expression in order to be considered a match")
            .required(true)
            .allowableValues(MATCH_ALL, MATCH_SUBSEQUENCE)
            .defaultValue(MATCH_ALL)
            .build();
    public static final PropertyDescriptor CHARACTER_SET = new PropertyDescriptor.Builder()
            .name("Character Set")
            .description("The Character Set in which the file is encoded")
            .required(true)
            .addValidator(StandardValidators.CHARACTER_SET_VALIDATOR)
            .defaultValue("UTF-8")
            .build();
    public static final Relationship REL_NO_MATCH = new Relationship.Builder().name("unmatched")
            .description("FlowFiles that do not match any of the user-supplied regular expressions will be routed to this relationship").build();
    // Relationships are mutated when dynamic properties change, so they live in an AtomicReference.
    private final AtomicReference<Set<Relationship>> relationships = new AtomicReference<>();
    private List<PropertyDescriptor> properties;

    /** Registers the fixed "unmatched" relationship and the static property descriptors. */
    @Override
    protected void init(final ProcessorInitializationContext context) {
        final Set<Relationship> relationships = new HashSet<>();
        relationships.add(REL_NO_MATCH);
        this.relationships.set(Collections.unmodifiableSet(relationships));
        final List<PropertyDescriptor> properties = new ArrayList<>();
        properties.add(MATCH_REQUIREMENT);
        properties.add(CHARACTER_SET);
        properties.add(BUFFER_SIZE);
        this.properties = Collections.unmodifiableList(properties);
    }
    @Override
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return properties;
    }
    @Override
    public Set<Relationship> getRelationships() {
        return relationships.get();
    }

    /**
     * Each user-defined property becomes a regex-validated dynamic property,
     * except for a property named like the reserved "unmatched" relationship.
     */
    @Override
    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
        if (propertyDescriptorName.equals(REL_NO_MATCH.getName())) {
            return null;
        }
        return new PropertyDescriptor.Builder()
                .required(false)
                .name(propertyDescriptorName)
                .addValidator(StandardValidators.createRegexValidator(0, Integer.MAX_VALUE, true))
                .dynamic(true)
                .expressionLanguageSupported(true)
                .build();
    }

    /** Keeps the relationship set in sync with the dynamic (user-defined) properties. */
    @Override
    public void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue, final String newValue) {
        if (descriptor.isDynamic()) {
            final Set<Relationship> relationships = new HashSet<>(this.relationships.get());
            final Relationship relationship = new Relationship.Builder().name(descriptor.getName()).build();
            if (newValue == null) {
                // Property removed -> relationship removed.
                relationships.remove(relationship);
            } else {
                relationships.add(relationship);
            }
            this.relationships.set(relationships);
        }
    }

    /**
     * Buffers up to Content Buffer Size bytes of each FlowFile, evaluates every
     * dynamic-property regex against the buffered content, then routes the
     * FlowFile (cloning it when more than one relationship matches). FlowFiles
     * with no matching regex go to the "unmatched" relationship.
     */
    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) {
        final List<FlowFile> flowFiles = session.get(1);
        if (flowFiles.isEmpty()) {
            return;
        }
        // Attribute values substituted into the regexes are treated as literals.
        final AttributeValueDecorator quoteDecorator = new AttributeValueDecorator() {
            @Override
            public String decorate(final String attributeValue) {
                return (attributeValue == null) ? null : Pattern.quote(attributeValue);
            }
        };
        final Map<FlowFile, Set<Relationship>> flowFileDestinationMap = new HashMap<>();
        final ProcessorLog logger = getLogger();
        final Charset charset = Charset.forName(context.getProperty(CHARACTER_SET).getValue());
        final byte[] buffer = new byte[context.getProperty(BUFFER_SIZE).asDataSize(DataUnit.B).intValue()];
        // Pass 1: buffer content and collect the set of matching relationships per FlowFile.
        for (final FlowFile flowFile : flowFiles) {
            final Set<Relationship> destinations = new HashSet<>();
            flowFileDestinationMap.put(flowFile, destinations);
            final IntegerHolder bufferedByteCount = new IntegerHolder(0);
            session.read(flowFile, new InputStreamCallback() {
                @Override
                public void process(final InputStream in) throws IOException {
                    // fillBuffer(..., false) does not require the stream to fill the whole buffer.
                    bufferedByteCount.set(StreamUtils.fillBuffer(in, buffer, false));
                }
            });
            final String contentString = new String(buffer, 0, bufferedByteCount.get(), charset);
            for (final PropertyDescriptor descriptor : context.getProperties().keySet()) {
                if (!descriptor.isDynamic()) {
                    continue;
                }
                // Regex may reference FlowFile attributes, so it is re-evaluated per FlowFile.
                final String regex = context.getProperty(descriptor).evaluateAttributeExpressions(flowFile, quoteDecorator).getValue();
                final Pattern pattern = Pattern.compile(regex);
                final boolean matches;
                if (context.getProperty(MATCH_REQUIREMENT).getValue().equalsIgnoreCase(MATCH_ALL)) {
                    matches = pattern.matcher(contentString).matches();
                } else {
                    matches = pattern.matcher(contentString).find();
                }
                if (matches) {
                    final Relationship relationship = new Relationship.Builder().name(descriptor.getName()).build();
                    destinations.add(relationship);
                }
            }
        }
        // Pass 2: route. The original FlowFile goes to the first matching relationship;
        // each additional match receives a clone.
        for (final Map.Entry<FlowFile, Set<Relationship>> entry : flowFileDestinationMap.entrySet()) {
            FlowFile flowFile = entry.getKey();
            final Set<Relationship> destinations = entry.getValue();
            if (destinations.isEmpty()) {
                flowFile = session.putAttribute(flowFile, ROUTE_ATTRIBUTE_KEY, REL_NO_MATCH.getName());
                session.transfer(flowFile, REL_NO_MATCH);
                session.getProvenanceReporter().route(flowFile, REL_NO_MATCH);
                logger.info("Routing {} to 'unmatched'", new Object[]{flowFile});
            } else {
                final Relationship firstRelationship = destinations.iterator().next();
                destinations.remove(firstRelationship);
                for (final Relationship relationship : destinations) {
                    FlowFile clone = session.clone(flowFile);
                    clone = session.putAttribute(clone, ROUTE_ATTRIBUTE_KEY, relationship.getName());
                    session.getProvenanceReporter().route(clone, relationship);
                    session.transfer(clone, relationship);
                    logger.info("Cloning {} to {} and routing clone to {}", new Object[]{flowFile, clone, relationship});
                }
                flowFile = session.putAttribute(flowFile, ROUTE_ATTRIBUTE_KEY, firstRelationship.getName());
                session.getProvenanceReporter().route(flowFile, firstRelationship);
                session.transfer(flowFile, firstRelationship);
                logger.info("Routing {} to {}", new Object[]{flowFile, firstRelationship});
            }
        }
    }
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ext.oracle.model;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.ext.oracle.data.OracleBinaryFormatter;
import org.jkiss.dbeaver.model.data.DBDBinaryFormatter;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCDatabaseMetaData;
import org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource;
import org.jkiss.dbeaver.model.impl.jdbc.JDBCSQLDialect;
import org.jkiss.dbeaver.model.impl.sql.BasicSQLDialect;
import org.jkiss.dbeaver.model.preferences.DBPPreferenceStore;
import org.jkiss.dbeaver.model.sql.SQLConstants;
import org.jkiss.dbeaver.model.struct.rdb.DBSProcedure;
import org.jkiss.utils.ArrayUtils;
import java.util.Arrays;
/**
* Oracle SQL dialect
*/
class OracleSQLDialect extends JDBCSQLDialect {
    // Keywords that introduce an executable call statement in Oracle SQL.
    public static final String[] EXEC_KEYWORDS = new String[]{ "call" };
    // Statements that do not open a transaction: the basic set plus Oracle DDL/analysis verbs.
    public static final String[] ORACLE_NON_TRANSACTIONAL_KEYWORDS = ArrayUtils.concatArrays(
        BasicSQLDialect.NON_TRANSACTIONAL_KEYWORDS,
        new String[]{
            "CREATE", "ALTER", "DROP",
            "ANALYZE", "VALIDATE",
        }
    );
    // Begin/end token pairs for PL/SQL block parsing: {opening token, matching closer}.
    public static final String[][] ORACLE_BEGIN_END_BLOCK = new String[][]{
        {SQLConstants.BLOCK_BEGIN, SQLConstants.BLOCK_END},
        {"IF", SQLConstants.BLOCK_END},
        {"LOOP", SQLConstants.BLOCK_END + " LOOP"},
        {"CASE", SQLConstants.BLOCK_END + " CASE"},
    };
    // Tokens that may precede a PL/SQL block header.
    public static final String[] ORACLE_BLOCK_HEADERS = new String[]{
        "DECLARE",
        "FUNCTION",
        "PROCEDURE",
        //"IS",
    };
    // Additional Oracle-specific keywords registered with the dialect.
    public static final String[] ADVANCED_KEYWORDS = {
        "REPLACE",
        "PACKAGE",
        "FUNCTION",
        "TYPE",
        "TRIGGER",
        "MATERIALIZED",
        "IF",
        "EACH",
        "RETURN",
        "WRAPPED",
        "AFTER",
        "BEFORE",
        "DATABASE",
        "ANALYZE",
        "VALIDATE",
        "STRUCTURE",
        "COMPUTE",
        "STATISTICS",
        "LOOP",
        "WHILE",
        "BULK",
        "ELSIF",
        "EXIT",
    };
    // True for servers older than 11.0 (set in initDriverSettings from the server version).
    private boolean crlfBroken;
    // Preference store of the owning data source container (set in initDriverSettings).
    private DBPPreferenceStore preferenceStore;
    /** Creates the dialect registered under the "Oracle" dialect id. */
    public OracleSQLDialect() {
        super("Oracle");
    }
public void initDriverSettings(JDBCDataSource dataSource, JDBCDatabaseMetaData metaData) {
super.initDriverSettings(dataSource, metaData);
crlfBroken = !dataSource.isServerVersionAtLeast(11, 0);
preferenceStore = dataSource.getContainer().getPreferenceStore();
addFunctions(
Arrays.asList(
"SUBSTR", "APPROX_COUNT_DISTINCT",
"REGEXP_SUBSTR", "REGEXP_INSTR", "REGEXP_REPLACE", "REGEXP_LIKE", "REGEXP_COUNT",
// Additions from #323
//Number Functions:
"BITAND",
"COSH",
"NANVL",
"REMAINDER",
"SINH",
"TANH",
"TRUNC",
//Character Functions Returning Character Values:
"CHR",
"INITCAP",
"LPAD",
"NLS_INITCAP",
"NLS_LOWER",
"NLSSORT",
"NLS_UPPER",
"RPAD",
"REVERSE",
"SUBSTRB",
"SUBSTRC",
"SUBSTR2",
"SUBSTR4",
// NLS Character Functions:
"NLS_CHARSET_DECL_LEN",
"NLS_CHARSET_ID",
"NLS_CHARSET_NAME",
//Character Functions Returning Number VALUES:
"INSTR",
"INSTRB",
"INSTRC",
"INSTR2",
"INSTR4",
"LENGTHB",
//Datetime Functions:
"ADD_MONTHS",
"DBTIMEZONE",
"FROM_TZ",
"LAST_DAY",
"MONTHS_BETWEEN",
"NEW_TIME",
"NEXT_DAY",
"NUMTODSINTERVAL",
"NUMTOYMINTERVAL",
"SESSIONTIMEZONE",
"SYS_EXTRACT_UTC",
"SYSDATE",
"SYSTIMESTAMP",
"TO_CHAR",
"TO_TIMESTAMP",
"TO_TIMESTAMP_TZ",
"TO_DSINTERVAL",
"TO_YMINTERVAL",
"TRUNC",
"TZ_OFFSET",
//General Comparison Functions:
"GREATEST",
"LEAST",
//Conversion Functions:
"ASCIISTR",
"BIN_TO_NUM",
"CHARTOROWID",
"COMPOSE",
"DECOMPOSE",
"HEXTORAW",
"NUMTODSINTERVAL",
"NUMTOYMINTERVAL",
"RAWTOHEX",
"RAWTONHEX",
"ROWIDTOCHAR",
"ROWIDTONCHAR",
"SCN_TO_TIMESTAMP",
"TIMESTAMP_TO_SCN",
"TO_BINARY_DOUBLE",
"TO_BINARY_FLOAT",
"TO_CHAR",
"TO_CLOB",
"TO_DATE",
"TO_DSINTERVAL",
"TO_LOB",
"TO_MULTI_BYTE",
"TO_NCHAR",
"TO_NCLOB",
"TO_NUMBER",
"TO_DSINTERVAL",
"TO_SINGLE_BYTE",
"TO_TIMESTAMP",
"TO_TIMESTAMP_TZ",
"TO_YMINTERVAL",
"TO_YMINTERVAL",
"UNISTR",
//Large Object Functions:
"BFILENAME",
"EMPTY_BLOB",
"EMPTY_CLOB",
//Collection Functions:
"POWERMULTISET",
"POWERMULTISET_BY_CARDINALITY",
//Hierarchical FUNCTION:
"SYS_CONNECT_BY_PATH",
//Data Mining Functions:
"CLUSTER_ID",
"CLUSTER_PROBABILITY",
"CLUSTER_SET",
"FEATURE_ID",
"FEATURE_SET",
"FEATURE_VALUE",
"PREDICTION",
"PREDICTION_COST",
"PREDICTION_DETAILS",
"PREDICTION_PROBABILITY",
"PREDICTION_SET",
//XML Functions:
"APPENDCHILDXML",
"DELETEXML",
"DEPTH",
"EXISTSNODE",
"EXTRACTVALUE",
"INSERTCHILDXML",
"INSERTXMLBEFORE",
"PATH",
"SYS_DBURIGEN",
"SYS_XMLAGG",
"SYS_XMLGEN",
"UPDATEXML",
"XMLAGG",
"XMLCDATA",
"XMLCOLATTVAL",
"XMLCOMMENT",
"XMLCONCAT",
"XMLFOREST",
"XMLPARSE",
"XMLPI",
"XMLQUERY",
"XMLROOT",
"XMLSEQUENCE",
"XMLSERIALIZE",
"XMLTABLE",
"XMLTRANSFORM",
//Encoding and Decoding Functions:
"DECODE",
"DUMP",
"ORA_HASH",
"VSIZE",
//NULL-Related Functions:
"LNNVL",
"NVL",
"NVL2",
//Environment and Identifier Functions:
"SYS_CONTEXT",
"SYS_GUID",
"SYS_TYPEID",
"UID",
"USERENV",
//Aggregate Functions:
"CORR_S",
"CORR_K",
"FIRST",
"GROUP_ID",
"GROUPING_ID",
"LAST",
"MEDIAN",
"STATS_BINOMIAL_TEST",
"STATS_CROSSTAB",
"STATS_F_TEST",
"STATS_KS_TEST",
"STATS_MODE",
"STATS_MW_TEST",
"STATS_ONE_WAY_ANOVA",
"STATS_T_TEST_ONE",
"STATS_T_TEST_PAIRED",
"STATS_T_TEST_INDEP",
"STATS_T_TEST_INDEPU",
"STATS_WSR_TEST",
"STDDEV",
"VARIANCE",
//Analytic Functions:
"FIRST",
"FIRST_VALUE",
"LAG",
"LAST",
"LAST_VALUE",
"LEAD",
"NTILE",
"RATIO_TO_REPORT",
"STDDEV",
"VARIANCE",
//Object Reference Functions:
"MAKE_REF",
"REFTOHEX",
//Model Functions:
"CV",
"ITERATION_NUMBER",
"PRESENTNNV",
"PRESENTV",
"PREVIOUS",
// Other #4134
"EXTRACT",
"LISTAGG",
"OVER"
));
removeSQLKeyword("SYSTEM");
for (String kw : ADVANCED_KEYWORDS) {
addSQLKeyword(kw);
}
}
@Override
public String[][] getBlockBoundStrings() {
return ORACLE_BEGIN_END_BLOCK;
}
@Override
public String[] getBlockHeaderStrings() {
return ORACLE_BLOCK_HEADERS;
}
@NotNull
@Override
public String[] getExecuteKeywords() {
return EXEC_KEYWORDS;
}
@NotNull
@Override
public MultiValueInsertMode getMultiValueInsertMode() {
return MultiValueInsertMode.GROUP_ROWS;
}
@Override
public boolean supportsAliasInSelect() {
return true;
}
@Override
public boolean supportsAliasInUpdate() {
return true;
}
@Override
public boolean supportsTableDropCascade() {
return true;
}
@Override
public boolean isDelimiterAfterBlock() {
return true;
}
@NotNull
@Override
public DBDBinaryFormatter getNativeBinaryFormatter() {
return OracleBinaryFormatter.INSTANCE;
}
@Nullable
@Override
public String getDualTableName() {
return "DUAL";
}
@NotNull
@Override
protected String[] getNonTransactionKeywords() {
return ORACLE_NON_TRANSACTIONAL_KEYWORDS;
}
@Override
protected String getStoredProcedureCallInitialClause(DBSProcedure proc) {
String schemaName = proc.getParentObject().getName();
return "CALL " + schemaName + "." + proc.getName();
}
@Override
public boolean isDisableScriptEscapeProcessing() {
return preferenceStore == null || preferenceStore.getBoolean(OracleConstants.PREF_DISABLE_SCRIPT_ESCAPE_PROCESSING);
}
@Override
public String getScriptDelimiter() {
return super.getScriptDelimiter();
}
@Override
public boolean isCRLFBroken() {
return crlfBroken;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.hadoop.igfs;
import org.apache.commons.logging.*;
import org.apache.ignite.*;
import org.apache.ignite.igfs.*;
import org.apache.ignite.internal.*;
import org.apache.ignite.internal.igfs.common.*;
import org.apache.ignite.internal.processors.igfs.*;
import org.apache.ignite.internal.util.future.*;
import org.apache.ignite.internal.util.lang.*;
import org.apache.ignite.lang.*;
import org.jetbrains.annotations.*;
import org.jsr166.*;
import java.io.*;
import java.util.*;
import static org.apache.ignite.internal.igfs.common.IgfsIpcCommand.*;
/**
 * Communication with external process (TCP or shmem).
 * <p>
 * Implements the IGFS client API by building request messages, sending them
 * through a shared {@code HadoopIgfsIpcIo} channel and converting the raw
 * response futures with the typed closure constants below.
 */
public class HadoopIgfsOutProc implements HadoopIgfsEx, HadoopIgfsIpcIoListener {
    /** Expected result is boolean. */
    private static final IgniteClosure<IgniteInternalFuture<IgfsMessage>, Boolean> BOOL_RES = createClosure();

    /** Expected result is long (e.g. a stream ID). */
    private static final IgniteClosure<IgniteInternalFuture<IgfsMessage>, Long> LONG_RES = createClosure();

    /** Expected result is {@code IgfsFile}. */
    private static final IgniteClosure<IgniteInternalFuture<IgfsMessage>, IgfsFile> FILE_RES = createClosure();

    /** Expected result is {@code IgfsHandshakeResponse}. */
    private static final IgniteClosure<IgniteInternalFuture<IgfsMessage>,
        IgfsHandshakeResponse> HANDSHAKE_RES = createClosure();

    /** Expected result is {@code IgfsStatus}. */
    private static final IgniteClosure<IgniteInternalFuture<IgfsMessage>, IgfsStatus> STATUS_RES =
        createClosure();

    /** Expected result is {@code IgfsInputStreamDescriptor}. */
    private static final IgniteClosure<IgniteInternalFuture<IgfsMessage>,
        IgfsInputStreamDescriptor> STREAM_DESCRIPTOR_RES = createClosure();

    /** Expected result is a collection of {@code IgfsFile}s. */
    private static final IgniteClosure<IgniteInternalFuture<IgfsMessage>,
        Collection<IgfsFile>> FILE_COL_RES = createClosure();

    /** Expected result is a collection of {@code IgfsPath}s. */
    private static final IgniteClosure<IgniteInternalFuture<IgfsMessage>,
        Collection<IgfsPath>> PATH_COL_RES = createClosure();

    /** Expected result is {@code IgfsPathSummary}. */
    private static final IgniteClosure<IgniteInternalFuture<IgfsMessage>, IgfsPathSummary> SUMMARY_RES =
        createClosure();

    /** Expected result is a collection of {@code IgfsBlockLocation}s. */
    private static final IgniteClosure<IgniteInternalFuture<IgfsMessage>,
        Collection<IgfsBlockLocation>> BLOCK_LOCATION_COL_RES = createClosure();

    /** Grid name. */
    private final String grid;

    /** IGFS name. */
    private final String igfs;

    /** Client log. */
    private final Log log;

    /** Client IO. */
    private final HadoopIgfsIpcIo io;

    /** Event listeners, keyed by stream ID. */
    private final Map<Long, HadoopIgfsStreamEventListener> lsnrs = new ConcurrentHashMap8<>();

    /**
     * Constructor for TCP endpoint.
     *
     * @param host Host.
     * @param port Port.
     * @param grid Grid name.
     * @param igfs IGFS name.
     * @param log Client logger.
     * @throws IOException If failed.
     */
    public HadoopIgfsOutProc(String host, int port, String grid, String igfs, Log log) throws IOException {
        this(host, port, grid, igfs, false, log);
    }

    /**
     * Constructor for shmem endpoint.
     *
     * @param port Port.
     * @param grid Grid name.
     * @param igfs IGFS name.
     * @param log Client logger.
     * @throws IOException If failed.
     */
    public HadoopIgfsOutProc(int port, String grid, String igfs, Log log) throws IOException {
        this(null, port, grid, igfs, true, log);
    }

    /**
     * Constructor. Exactly one of the transports is chosen: a non-null host
     * implies TCP, a null host implies shared memory.
     *
     * @param host Host.
     * @param port Port.
     * @param grid Grid name.
     * @param igfs IGFS name.
     * @param shmem Shared memory flag.
     * @param log Client logger.
     * @throws IOException If failed.
     */
    private HadoopIgfsOutProc(String host, int port, String grid, String igfs, boolean shmem, Log log)
        throws IOException {
        assert host != null && !shmem || host == null && shmem :
            "Invalid arguments [host=" + host + ", port=" + port + ", shmem=" + shmem + ']';

        // Endpoint string format: "host:port" for TCP, "shmem:port" for shared memory.
        String endpoint = host != null ? host + ":" + port : "shmem:" + port;

        this.grid = grid;
        this.igfs = igfs;
        this.log = log;

        // IO instances are shared per endpoint; register for connection events.
        io = HadoopIgfsIpcIo.get(log, endpoint);

        io.addEventListener(this);
    }

    /** {@inheritDoc} */
    @Override public IgfsHandshakeResponse handshake(String logDir) throws IgniteCheckedException {
        final IgfsHandshakeRequest req = new IgfsHandshakeRequest();

        req.gridName(grid);
        req.igfsName(igfs);
        req.logDirectory(logDir);

        return io.send(req).chain(HANDSHAKE_RES).get();
    }

    /** {@inheritDoc} */
    @Override public void close(boolean force) {
        assert io != null;

        io.removeEventListener(this);

        if (force)
            io.forceClose();
        else
            io.release();
    }

    /** {@inheritDoc} */
    @Override public IgfsFile info(IgfsPath path) throws IgniteCheckedException {
        final IgfsPathControlRequest msg = new IgfsPathControlRequest();

        msg.command(INFO);
        msg.path(path);

        return io.send(msg).chain(FILE_RES).get();
    }

    /** {@inheritDoc} */
    @Override public IgfsFile update(IgfsPath path, Map<String, String> props) throws IgniteCheckedException {
        final IgfsPathControlRequest msg = new IgfsPathControlRequest();

        msg.command(UPDATE);
        msg.path(path);
        msg.properties(props);

        return io.send(msg).chain(FILE_RES).get();
    }

    /** {@inheritDoc} */
    @Override public Boolean setTimes(IgfsPath path, long accessTime, long modificationTime) throws IgniteCheckedException {
        final IgfsPathControlRequest msg = new IgfsPathControlRequest();

        msg.command(SET_TIMES);
        msg.path(path);
        msg.accessTime(accessTime);
        msg.modificationTime(modificationTime);

        return io.send(msg).chain(BOOL_RES).get();
    }

    /** {@inheritDoc} */
    @Override public Boolean rename(IgfsPath src, IgfsPath dest) throws IgniteCheckedException {
        final IgfsPathControlRequest msg = new IgfsPathControlRequest();

        msg.command(RENAME);
        msg.path(src);
        msg.destinationPath(dest);

        return io.send(msg).chain(BOOL_RES).get();
    }

    /** {@inheritDoc} */
    @Override public Boolean delete(IgfsPath path, boolean recursive) throws IgniteCheckedException {
        final IgfsPathControlRequest msg = new IgfsPathControlRequest();

        msg.command(DELETE);
        msg.path(path);
        msg.flag(recursive);

        return io.send(msg).chain(BOOL_RES).get();
    }

    /** {@inheritDoc} */
    @Override public Collection<IgfsBlockLocation> affinity(IgfsPath path, long start, long len)
        throws IgniteCheckedException {
        final IgfsPathControlRequest msg = new IgfsPathControlRequest();

        msg.command(AFFINITY);
        msg.path(path);
        msg.start(start);
        msg.length(len);

        return io.send(msg).chain(BLOCK_LOCATION_COL_RES).get();
    }

    /** {@inheritDoc} */
    @Override public IgfsPathSummary contentSummary(IgfsPath path) throws IgniteCheckedException {
        final IgfsPathControlRequest msg = new IgfsPathControlRequest();

        msg.command(PATH_SUMMARY);
        msg.path(path);

        return io.send(msg).chain(SUMMARY_RES).get();
    }

    /** {@inheritDoc} */
    @Override public Boolean mkdirs(IgfsPath path, Map<String, String> props) throws IgniteCheckedException {
        final IgfsPathControlRequest msg = new IgfsPathControlRequest();

        msg.command(MAKE_DIRECTORIES);
        msg.path(path);
        msg.properties(props);

        return io.send(msg).chain(BOOL_RES).get();
    }

    /** {@inheritDoc} */
    @Override public Collection<IgfsFile> listFiles(IgfsPath path) throws IgniteCheckedException {
        final IgfsPathControlRequest msg = new IgfsPathControlRequest();

        msg.command(LIST_FILES);
        msg.path(path);

        return io.send(msg).chain(FILE_COL_RES).get();
    }

    /** {@inheritDoc} */
    @Override public Collection<IgfsPath> listPaths(IgfsPath path) throws IgniteCheckedException {
        final IgfsPathControlRequest msg = new IgfsPathControlRequest();

        msg.command(LIST_PATHS);
        msg.path(path);

        return io.send(msg).chain(PATH_COL_RES).get();
    }

    /** {@inheritDoc} */
    @Override public IgfsStatus fsStatus() throws IgniteCheckedException {
        return io.send(new IgfsStatusRequest()).chain(STATUS_RES).get();
    }

    /** {@inheritDoc} */
    @Override public HadoopIgfsStreamDelegate open(IgfsPath path) throws IgniteCheckedException {
        final IgfsPathControlRequest msg = new IgfsPathControlRequest();

        msg.command(OPEN_READ);
        msg.path(path);
        msg.flag(false); // No sequential-read prefetch tuning for this variant.

        IgfsInputStreamDescriptor rmtDesc = io.send(msg).chain(STREAM_DESCRIPTOR_RES).get();

        return new HadoopIgfsStreamDelegate(this, rmtDesc.streamId(), rmtDesc.length());
    }

    /** {@inheritDoc} */
    @Override public HadoopIgfsStreamDelegate open(IgfsPath path,
        int seqReadsBeforePrefetch) throws IgniteCheckedException {
        final IgfsPathControlRequest msg = new IgfsPathControlRequest();

        msg.command(OPEN_READ);
        msg.path(path);
        msg.flag(true); // Enables the sequentialReadsBeforePrefetch hint below.
        msg.sequentialReadsBeforePrefetch(seqReadsBeforePrefetch);

        IgfsInputStreamDescriptor rmtDesc = io.send(msg).chain(STREAM_DESCRIPTOR_RES).get();

        return new HadoopIgfsStreamDelegate(this, rmtDesc.streamId(), rmtDesc.length());
    }

    /** {@inheritDoc} */
    @Override public HadoopIgfsStreamDelegate create(IgfsPath path, boolean overwrite, boolean colocate,
        int replication, long blockSize, @Nullable Map<String, String> props) throws IgniteCheckedException {
        final IgfsPathControlRequest msg = new IgfsPathControlRequest();

        msg.command(OPEN_CREATE);
        msg.path(path);
        msg.flag(overwrite);
        msg.colocate(colocate);
        msg.properties(props);
        msg.replication(replication);
        msg.blockSize(blockSize);

        // The server responds with the ID of the newly created output stream.
        Long streamId = io.send(msg).chain(LONG_RES).get();

        return new HadoopIgfsStreamDelegate(this, streamId);
    }

    /** {@inheritDoc} */
    @Override public HadoopIgfsStreamDelegate append(IgfsPath path, boolean create,
        @Nullable Map<String, String> props) throws IgniteCheckedException {
        final IgfsPathControlRequest msg = new IgfsPathControlRequest();

        msg.command(OPEN_APPEND);
        msg.path(path);
        msg.flag(create);
        msg.properties(props);

        Long streamId = io.send(msg).chain(LONG_RES).get();

        return new HadoopIgfsStreamDelegate(this, streamId);
    }

    /** {@inheritDoc} */
    @Override public IgniteInternalFuture<byte[]> readData(HadoopIgfsStreamDelegate desc, long pos, int len,
        final @Nullable byte[] outBuf, final int outOff, final int outLen) {
        assert len > 0;

        final IgfsStreamControlRequest msg = new IgfsStreamControlRequest();

        msg.command(READ_BLOCK);
        msg.streamId((long) desc.target());
        msg.position(pos);
        msg.length(len);

        try {
            return io.send(msg, outBuf, outOff, outLen);
        }
        catch (IgniteCheckedException e) {
            // Surface the failure through the returned future rather than throwing.
            return new GridFinishedFuture<>(e);
        }
    }

    /** {@inheritDoc} */
    @Override public void writeData(HadoopIgfsStreamDelegate desc, byte[] data, int off, int len)
        throws IOException {
        final IgfsStreamControlRequest msg = new IgfsStreamControlRequest();

        msg.command(WRITE_BLOCK);
        msg.streamId((long) desc.target());
        msg.data(data);
        msg.position(off);
        msg.length(len);

        try {
            // Fire-and-forget: no response is awaited for write blocks.
            io.sendPlain(msg);
        }
        catch (IgniteCheckedException e) {
            throw HadoopIgfsUtils.cast(e);
        }
    }

    /** {@inheritDoc} */
    @Override public void flush(HadoopIgfsStreamDelegate delegate) throws IOException {
        // No-op.
    }

    /** {@inheritDoc} */
    @Override public void closeStream(HadoopIgfsStreamDelegate desc) throws IOException {
        final IgfsStreamControlRequest msg = new IgfsStreamControlRequest();

        msg.command(CLOSE);
        msg.streamId((long)desc.target());

        try {
            io.send(msg).chain(BOOL_RES).get();
        }
        catch (IgniteCheckedException e) {
            throw HadoopIgfsUtils.cast(e);
        }
    }

    /** {@inheritDoc} */
    @Override public void addEventListener(HadoopIgfsStreamDelegate desc,
        HadoopIgfsStreamEventListener lsnr) {
        long streamId = desc.target();

        HadoopIgfsStreamEventListener lsnr0 = lsnrs.put(streamId, lsnr);

        // Re-registering the same listener for a stream is allowed; replacing it is not.
        assert lsnr0 == null || lsnr0 == lsnr;

        if (log.isDebugEnabled())
            log.debug("Added stream event listener [streamId=" + streamId + ']');
    }

    /** {@inheritDoc} */
    @Override public void removeEventListener(HadoopIgfsStreamDelegate desc) {
        long streamId = desc.target();

        HadoopIgfsStreamEventListener lsnr0 = lsnrs.remove(streamId);

        if (lsnr0 != null && log.isDebugEnabled())
            log.debug("Removed stream event listener [streamId=" + streamId + ']');
    }

    /** {@inheritDoc} */
    @Override public void onClose() {
        // Notify all registered stream listeners; listener failures are logged, not propagated.
        for (HadoopIgfsStreamEventListener lsnr : lsnrs.values()) {
            try {
                lsnr.onClose();
            }
            catch (IgniteCheckedException e) {
                log.warn("Got exception from stream event listener (will ignore): " + lsnr, e);
            }
        }
    }

    /** {@inheritDoc} */
    @Override public void onError(long streamId, String errMsg) {
        HadoopIgfsStreamEventListener lsnr = lsnrs.get(streamId);

        if (lsnr != null)
            lsnr.onError(errMsg);
        else
            log.warn("Received write error response for not registered output stream (will ignore) " +
                "[streamId= " + streamId + ']');
    }

    /**
     * Creates conversion closure for given type. The closure unwraps the
     * control response, rethrows any server-side error and casts the payload.
     *
     * @param <T> Type of expected result.
     * @return Conversion closure.
     */
    @SuppressWarnings("unchecked")
    private static <T> IgniteClosure<IgniteInternalFuture<IgfsMessage>, T> createClosure() {
        return new IgniteClosure<IgniteInternalFuture<IgfsMessage>, T>() {
            @Override public T apply(IgniteInternalFuture<IgfsMessage> fut) {
                try {
                    IgfsControlResponse res = (IgfsControlResponse)fut.get();

                    if (res.hasError())
                        res.throwError();

                    return (T)res.response();
                }
                catch (IgfsException | IgniteCheckedException e) {
                    throw new GridClosureException(e);
                }
            }
        };
    }
}
| |
/*
* Copyright (c) 2010-2016. Axon Framework
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.axonframework.commandhandling.model;
import org.axonframework.commandhandling.model.inspection.AggregateModel;
import org.axonframework.commandhandling.model.inspection.ModelInspector;
import org.axonframework.common.Assert;
import org.axonframework.messaging.annotation.ParameterResolverFactory;
import org.axonframework.messaging.unitofwork.CurrentUnitOfWork;
import org.axonframework.messaging.unitofwork.UnitOfWork;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;
/**
 * Abstract implementation of the {@link Repository} that takes care of the dispatching of events when an aggregate is
 * persisted. All uncommitted events on an aggregate are dispatched when the aggregate is saved.
 * <p>
 * Note that this repository implementation does not take care of any locking. The underlying persistence is expected
 * to deal with concurrency. Alternatively, consider using the {@link LockingRepository}.
 *
 * @param <T> The type of aggregate this repository stores
 * @author Allard Buijze
 * @see LockingRepository
 * @since 0.1
 */
public abstract class AbstractRepository<T, A extends Aggregate<T>> implements Repository<T> {

    // Unit-of-Work resource key under which loaded/created aggregates are cached.
    // Includes this repository's identity so that multiple repositories in one
    // Unit of Work use distinct caches.
    private final String aggregatesKey = this + "_AGGREGATES";

    private final Class<T> aggregateType;
    private final AggregateModel<T> aggregateModel;

    /**
     * Initializes a repository that stores aggregate of the given {@code aggregateType}. All aggregates in this
     * repository must be {@code instanceOf} this aggregate type.
     *
     * @param aggregateType The type of aggregate stored in this repository
     */
    protected AbstractRepository(Class<T> aggregateType) {
        Assert.notNull(aggregateType, "aggregateType may not be null");
        this.aggregateType = aggregateType;
        this.aggregateModel = ModelInspector.inspectAggregate(aggregateType);
    }

    /**
     * Initializes a repository that stores aggregate of the given {@code aggregateType}. All aggregates in this
     * repository must be {@code instanceOf} this aggregate type.
     *
     * @param aggregateType            The type of aggregate stored in this repository
     * @param parameterResolverFactory The parameter resolver factory used to resolve parameters of annotated handlers
     */
    protected AbstractRepository(Class<T> aggregateType, ParameterResolverFactory parameterResolverFactory) {
        Assert.notNull(aggregateType, "aggregateType may not be null");
        Assert.notNull(parameterResolverFactory, "parameterResolverFactory may not be null");
        this.aggregateType = aggregateType;
        this.aggregateModel = ModelInspector.inspectAggregate(aggregateType, parameterResolverFactory);
    }

    /**
     * Creates a new aggregate via {@code factoryMethod}, registers it with the current Unit of Work and
     * schedules it for persistence on commit.
     *
     * @param factoryMethod The method creating the aggregate's root instance
     * @return the newly created aggregate, registered with the current Unit of Work
     * @throws Exception if the factory method or registration fails
     */
    @Override
    public A newInstance(Callable<T> factoryMethod) throws Exception {
        A aggregate = doCreateNew(factoryMethod);
        aggregate.execute(root -> Assert.isTrue(aggregateType.isInstance(root),
                                                "Unsuitable aggregate for this repository: wrong type"));
        UnitOfWork<?> uow = CurrentUnitOfWork.get();
        // Typed Map<String, A> for consistency with load(), which reads the same resource key.
        Map<String, A> aggregates = uow.root().getOrComputeResource(aggregatesKey, s -> new HashMap<>());
        Assert.isTrue(aggregates.putIfAbsent(aggregate.identifierAsString(), aggregate) == null,
                      "The Unit of Work already has an Aggregate with the same identifier");
        uow.onRollback(u -> aggregates.remove(aggregate.identifierAsString()));
        prepareForCommit(aggregate);
        return aggregate;
    }

    /**
     * Performs the actual creation of a new aggregate instance using the given {@code factoryMethod}.
     *
     * @param factoryMethod The method creating the aggregate's root instance
     * @return the newly created aggregate
     * @throws Exception if creation fails
     */
    protected abstract A doCreateNew(Callable<T> factoryMethod) throws Exception;

    /**
     * Loads the aggregate with the given identifier from the current Unit of Work's cache, falling back to
     * {@link #doLoad(String, Long)}. The loaded aggregate is validated against {@code expectedVersion} and
     * scheduled for persistence on commit.
     *
     * @param aggregateIdentifier The identifier of the aggregate to load
     * @param expectedVersion     The expected version of the aggregate, or {@code null} to skip the check
     * @return the loaded aggregate
     * @throws AggregateNotFoundException if aggregate with given id cannot be found
     * @throws RuntimeException           any exception thrown by implementing classes
     */
    @Override
    public A load(String aggregateIdentifier, Long expectedVersion) {
        UnitOfWork<?> uow = CurrentUnitOfWork.get();
        Map<String, A> aggregates = uow.root().getOrComputeResource(aggregatesKey, s -> new HashMap<>());
        A aggregate = aggregates.computeIfAbsent(aggregateIdentifier,
                                                 s -> doLoad(aggregateIdentifier, expectedVersion));
        uow.onRollback(u -> aggregates.remove(aggregateIdentifier));
        validateOnLoad(aggregate, expectedVersion);
        prepareForCommit(aggregate);
        return aggregate;
    }

    @Override
    public A load(String aggregateIdentifier) {
        // Delegate with a null expected version, i.e. no version check.
        return load(aggregateIdentifier, null);
    }

    /**
     * Checks the aggregate for concurrent changes. Throws a
     * {@link ConflictingModificationException} when conflicting changes have been
     * detected.
     * <p>
     * This implementation throws a {@link ConflictingAggregateVersionException} if the expected version is not null
     * and the version number of the aggregate does not match the expected version
     *
     * @param aggregate       The loaded aggregate
     * @param expectedVersion The expected version of the aggregate
     * @throws ConflictingModificationException
     * @throws ConflictingAggregateVersionException
     */
    protected void validateOnLoad(Aggregate<T> aggregate, Long expectedVersion) {
        if (expectedVersion != null && aggregate.version() != null &&
                !expectedVersion.equals(aggregate.version())) {
            throw new ConflictingAggregateVersionException(aggregate.identifierAsString(),
                                                           expectedVersion,
                                                           aggregate.version());
        }
    }

    /**
     * Register handlers with the current Unit of Work that save or delete the given {@code aggregate} when
     * the Unit of Work is committed.
     *
     * @param aggregate The Aggregate to save or delete when the Unit of Work is committed
     */
    protected void prepareForCommit(A aggregate) {
        CurrentUnitOfWork.get().onPrepareCommit(u -> {
            if (aggregate.isDeleted()) {
                doDelete(aggregate);
            } else {
                doSave(aggregate);
            }
            // Deliberately re-checked: the deleted state may have changed during doSave/doDelete.
            if (aggregate.isDeleted()) {
                postDelete(aggregate);
            } else {
                postSave(aggregate);
            }
        });
    }

    /**
     * Returns the model describing the structure of the aggregates stored by this repository.
     *
     * @return the aggregate model used by this repository
     */
    protected AggregateModel<T> aggregateModel() {
        return aggregateModel;
    }

    /**
     * Returns the aggregate type stored by this repository.
     *
     * @return the aggregate type stored by this repository
     */
    protected Class<T> getAggregateType() {
        return aggregateType;
    }

    /**
     * Performs the actual saving of the aggregate.
     *
     * @param aggregate the aggregate to store
     */
    protected abstract void doSave(A aggregate);

    /**
     * Loads and initialized the aggregate with the given aggregateIdentifier.
     *
     * @param aggregateIdentifier the identifier of the aggregate to load
     * @param expectedVersion     The expected version of the aggregate to load
     * @return a fully initialized aggregate
     * @throws AggregateNotFoundException if the aggregate with given identifier does not exist
     */
    protected abstract A doLoad(String aggregateIdentifier, Long expectedVersion);

    /**
     * Removes the aggregate from the repository. Typically, the repository should ensure that any calls to {@link
     * #doLoad(String, Long)} throw a {@link AggregateNotFoundException} when
     * loading a deleted aggregate.
     *
     * @param aggregate the aggregate to delete
     */
    protected abstract void doDelete(A aggregate);

    /**
     * Perform action that needs to be done directly after updating an aggregate and committing the aggregate's
     * uncommitted events.
     *
     * @param aggregate The aggregate instance being saved
     */
    protected void postSave(A aggregate) {
    }

    /**
     * Perform action that needs to be done directly after deleting an aggregate and committing the aggregate's
     * uncommitted events.
     *
     * @param aggregate The aggregate instance being deleted
     */
    protected void postDelete(A aggregate) {
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.*;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.List;
import java.util.Objects;
/**
* This defines the core properties and functions to operate on a field.
*/
public abstract class MappedFieldType extends FieldType {
/**
 * Value holder for the different names a field is known by: the name it is
 * indexed under, the original (pre-"path" processing) index name, and the
 * full dot-separated name.
 */
public static class Names {

    private final String indexName;
    private final String originalIndexName;
    private final String fullName;

    /** Creates names where every variant equals {@code name}. */
    public Names(String name) {
        this(name, name, name);
    }

    public Names(String indexName, String originalIndexName, String fullName) {
        this.indexName = indexName;
        this.originalIndexName = originalIndexName;
        this.fullName = fullName;
    }

    /**
     * The indexed name of the field. This is the name under which we will
     * store it in the index.
     */
    public String indexName() {
        return indexName;
    }

    /** The original index name, before any "path" modifications performed on it. */
    public String originalIndexName() {
        return originalIndexName;
    }

    /** The full name, including dot path. */
    public String fullName() {
        return fullName;
    }

    @Override
    public boolean equals(Object o) {
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        Names other = (Names) o;
        return fullName.equals(other.fullName)
            && indexName.equals(other.indexName)
            && originalIndexName.equals(other.originalIndexName);
    }

    @Override
    public int hashCode() {
        // Same accumulation order as the original implementation, so hash
        // values are unchanged.
        int h = indexName.hashCode();
        h = 31 * h + originalIndexName.hashCode();
        h = 31 * h + fullName.hashCode();
        return h;
    }
}
/**
 * Field data loading mode: lazily on first use, eagerly, or eagerly
 * including global ordinals. Each constant's {@code toString()} yields its
 * canonical configuration string.
 */
public enum Loading {
    LAZY {
        @Override
        public String toString() {
            return LAZY_VALUE;
        }
    },
    EAGER {
        @Override
        public String toString() {
            return EAGER_VALUE;
        }
    },
    EAGER_GLOBAL_ORDINALS {
        @Override
        public String toString() {
            return EAGER_GLOBAL_ORDINALS_VALUE;
        }
    };

    public static final String KEY = "loading";
    public static final String EAGER_GLOBAL_ORDINALS_VALUE = "eager_global_ordinals";
    public static final String EAGER_VALUE = "eager";
    public static final String LAZY_VALUE = "lazy";

    /**
     * Parses the given textual value (case-insensitive) into a loading mode.
     *
     * @param loading      the configured value; may be null or empty
     * @param defaultValue returned when {@code loading} is null or empty
     * @return the matching mode
     * @throws MapperParsingException if the value matches no known mode
     */
    public static Loading parse(String loading, Loading defaultValue) {
        if (Strings.isNullOrEmpty(loading)) {
            return defaultValue;
        }
        // Compare against each constant's canonical string value.
        for (Loading mode : values()) {
            if (mode.toString().equalsIgnoreCase(loading)) {
                return mode;
            }
        }
        throw new MapperParsingException("Unknown [" + KEY + "] value: [" + loading + "]");
    }
}
/** The names (index name, original index name, full name) this field is known by. */
private Names names;

/** Boost factor; set to 1.0 by the default constructor. */
private float boost;

// TODO: remove this docvalues flag and use docValuesType
private boolean docValues;

/** Index-time analyzer. */
private NamedAnalyzer indexAnalyzer;

/** Search-time analyzer. */
private NamedAnalyzer searchAnalyzer;

/** Search-time analyzer for quoted (phrase) text. */
private NamedAnalyzer searchQuoteAnalyzer;

/** Scoring similarity provider; may be null (see null handling in equals). */
private SimilarityProvider similarity;

/** Norms loading mode; may be null. */
private Loading normsLoading;

/** Field data configuration; defaults to one named after typeName(). */
private FieldDataType fieldDataType;

/** Configured substitute value for null; null when none configured. */
private Object nullValue;

private String nullValueAsString; // for sending null value to _all field

/**
 * Copy constructor: copies the Lucene {@code FieldType} flags via
 * {@code super(ref)} and every mapper-level property from {@code ref}.
 *
 * @param ref the field type to copy
 */
protected MappedFieldType(MappedFieldType ref) {
    super(ref);
    this.names = ref.names();
    this.boost = ref.boost();
    this.docValues = ref.hasDocValues();
    this.indexAnalyzer = ref.indexAnalyzer();
    this.searchAnalyzer = ref.searchAnalyzer();
    this.searchQuoteAnalyzer = ref.searchQuoteAnalyzer();
    this.similarity = ref.similarity();
    this.normsLoading = ref.normsLoading();
    this.fieldDataType = ref.fieldDataType();
    this.nullValue = ref.nullValue();
    this.nullValueAsString = ref.nullValueAsString();
}

/**
 * Default constructor: tokenized, not stored, no term vectors, norms enabled
 * (omitNorms false), docs/freqs/positions indexed, boost 1.0.
 */
public MappedFieldType() {
    setTokenized(true);
    setStored(false);
    setStoreTermVectors(false);
    setOmitNorms(false);
    setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
    setBoost(1.0f);
    // NOTE(review): calls the overridable typeName() from the constructor;
    // subclasses must ensure typeName() is safe to call before their own
    // initialization completes.
    fieldDataType = new FieldDataType(typeName());
}
@Override
public abstract MappedFieldType clone();

/**
 * Equality covers the Lucene-level flags (via {@code super.equals}) plus all
 * mapper-level properties. Similarity is compared by provider name because
 * the provider itself has no equals implementation (see TODO below).
 */
// NOTE(review): the cast below assumes super.equals rejects objects of a
// different class — TODO confirm FieldType.equals performs a class check.
@Override
public boolean equals(Object o) {
    if (!super.equals(o)) return false;
    MappedFieldType fieldType = (MappedFieldType) o;
    // check similarity first because we need to check the name, and it might be null
    // TODO: SimilarityProvider should have equals?
    if (similarity == null || fieldType.similarity == null) {
        // At least one side is null: equal only if both are null.
        if (similarity != fieldType.similarity) {
            return false;
        }
    } else {
        if (Objects.equals(similarity.name(), fieldType.similarity.name()) == false) {
            return false;
        }
    }
    return boost == fieldType.boost &&
        docValues == fieldType.docValues &&
        Objects.equals(names, fieldType.names) &&
        Objects.equals(indexAnalyzer, fieldType.indexAnalyzer) &&
        Objects.equals(searchAnalyzer, fieldType.searchAnalyzer) &&
        Objects.equals(searchQuoteAnalyzer(), fieldType.searchQuoteAnalyzer()) &&
        Objects.equals(normsLoading, fieldType.normsLoading) &&
        Objects.equals(fieldDataType, fieldType.fieldDataType) &&
        Objects.equals(nullValue, fieldType.nullValue) &&
        Objects.equals(nullValueAsString, fieldType.nullValueAsString);
}

/** Hashes the same property set as {@link #equals}, keeping the two consistent. */
@Override
public int hashCode() {
    return Objects.hash(super.hashCode(), names, boost, docValues, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer,
        similarity == null ? null : similarity.name(), normsLoading, fieldDataType, nullValue, nullValueAsString);
}
// norelease: we need to override freeze() and add safety checks that all settings are actually set
/** Returns the name of this type, as would be specified in mapping properties */
public abstract String typeName();

/**
 * Checks this type is the same type as {@code other}; throws if they differ.
 * (The previous comment said a conflict was "added" — the code actually throws.)
 *
 * @throws IllegalArgumentException if the type names differ
 * @throws IllegalStateException    if the type names match but the implementation classes do not
 *                                  (indicates an internal registration bug)
 */
// NOTE(review): 'final' is redundant on a private method.
private final void checkTypeName(MappedFieldType other) {
    if (typeName().equals(other.typeName()) == false) {
        throw new IllegalArgumentException("mapper [" + names().fullName() + "] cannot be changed from type [" + typeName() + "] to [" + other.typeName() + "]");
    } else if (getClass() != other.getClass()) {
        throw new IllegalStateException("Type names equal for class " + getClass().getSimpleName() + " and " + other.getClass().getSimpleName());
    }
}
/**
 * Checks for any conflicts between this field type and other.
 * If strict is true, all properties must be equal.
 * Otherwise, only properties which must never change in an index are checked.
 */
public void checkCompatibility(MappedFieldType other, List<String> conflicts, boolean strict) {
    checkTypeName(other);

    // --- properties that must never change once documents are indexed ---
    boolean indexed = indexOptions() != IndexOptions.NONE;
    boolean mergeWithIndexed = other.indexOptions() != IndexOptions.NONE;
    // TODO: should be validating if index options go "up" (but "down" is ok)
    if (indexed != mergeWithIndexed || tokenized() != other.tokenized()) {
        conflicts.add("mapper [" + names().fullName() + "] has different [index] values");
    }
    if (stored() != other.stored()) {
        conflicts.add("mapper [" + names().fullName() + "] has different [store] values");
    }
    if (hasDocValues() == false && other.hasDocValues()) {
        // don't add conflict if this mapper has doc values while the mapper to merge doesn't since doc values are implicitly set
        // when the doc_values field data format is configured
        conflicts.add("mapper [" + names().fullName() + "] has different [doc_values] values, cannot change from disabled to enabled");
    }
    if (omitNorms() && !other.omitNorms()) {
        // NOTE(review): message reads "from disable to enabled" — likely meant "disabled"
        conflicts.add("mapper [" + names().fullName() + "] has different [omit_norms] values, cannot change from disable to enabled");
    }
    if (storeTermVectors() != other.storeTermVectors()) {
        conflicts.add("mapper [" + names().fullName() + "] has different [store_term_vector] values");
    }
    if (storeTermVectorOffsets() != other.storeTermVectorOffsets()) {
        conflicts.add("mapper [" + names().fullName() + "] has different [store_term_vector_offsets] values");
    }
    if (storeTermVectorPositions() != other.storeTermVectorPositions()) {
        conflicts.add("mapper [" + names().fullName() + "] has different [store_term_vector_positions] values");
    }
    if (storeTermVectorPayloads() != other.storeTermVectorPayloads()) {
        conflicts.add("mapper [" + names().fullName() + "] has different [store_term_vector_payloads] values");
    }

    // null and "default"-named index analyzers both mean the default is used
    if (indexAnalyzer() == null || "default".equals(indexAnalyzer().name())) {
        if (other.indexAnalyzer() != null && "default".equals(other.indexAnalyzer().name()) == false) {
            conflicts.add("mapper [" + names().fullName() + "] has different [analyzer]");
        }
    } else if (other.indexAnalyzer() == null || "default".equals(other.indexAnalyzer().name())) {
        conflicts.add("mapper [" + names().fullName() + "] has different [analyzer]");
    } else if (indexAnalyzer().name().equals(other.indexAnalyzer().name()) == false) {
        conflicts.add("mapper [" + names().fullName() + "] has different [analyzer]");
    }

    if (!names().indexName().equals(other.names().indexName())) {
        conflicts.add("mapper [" + names().fullName() + "] has different [index_name]");
    }
    if (Objects.equals(similarity(), other.similarity()) == false) {
        conflicts.add("mapper [" + names().fullName() + "] has different [similarity]");
    }

    // --- strict mode: every remaining property must also match exactly ---
    if (strict) {
        if (omitNorms() != other.omitNorms()) {
            conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [omit_norms] across all types.");
        }
        if (boost() != other.boost()) {
            conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [boost] across all types.");
        }
        if (normsLoading() != other.normsLoading()) {
            conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [norms.loading] across all types.");
        }
        if (Objects.equals(searchAnalyzer(), other.searchAnalyzer()) == false) {
            conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [search_analyzer] across all types.");
        }
        if (Objects.equals(searchQuoteAnalyzer(), other.searchQuoteAnalyzer()) == false) {
            conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [search_quote_analyzer] across all types.");
        }
        if (Objects.equals(fieldDataType(), other.fieldDataType()) == false) {
            conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [fielddata] across all types.");
        }
        if (Objects.equals(nullValue(), other.nullValue()) == false) {
            conflicts.add("mapper [" + names().fullName() + "] is used by multiple types. Set update_all_types to true to update [null_value] across all types.");
        }
    }
}
// ---- capability flags and property accessors ----
// Setters call checkIfFrozen() first; presumably the type rejects mutation
// once frozen (see checkIfFrozen(), defined outside this view).

/** Whether this field type holds numeric values; overridden by numeric types. */
public boolean isNumeric() {
    return false;
}

/** Whether this field type can be sorted on; true by default. */
public boolean isSortable() {
    return true;
}

public Names names() {
    return names;
}

public void setNames(Names names) {
    checkIfFrozen();
    this.names = names;
}

public float boost() {
    return boost;
}

public void setBoost(float boost) {
    checkIfFrozen();
    this.boost = boost;
}

public FieldDataType fieldDataType() {
    return fieldDataType;
}

public void setFieldDataType(FieldDataType fieldDataType) {
    checkIfFrozen();
    this.fieldDataType = fieldDataType;
}

public boolean hasDocValues() {
    return docValues;
}

public void setHasDocValues(boolean hasDocValues) {
    checkIfFrozen();
    this.docValues = hasDocValues;
}

public Loading normsLoading() {
    return normsLoading;
}

public void setNormsLoading(Loading normsLoading) {
    checkIfFrozen();
    this.normsLoading = normsLoading;
}

public NamedAnalyzer indexAnalyzer() {
    return indexAnalyzer;
}

public void setIndexAnalyzer(NamedAnalyzer analyzer) {
    checkIfFrozen();
    this.indexAnalyzer = analyzer;
}

public NamedAnalyzer searchAnalyzer() {
    return searchAnalyzer;
}

public void setSearchAnalyzer(NamedAnalyzer analyzer) {
    checkIfFrozen();
    this.searchAnalyzer = analyzer;
}

/** Quote analyzer for phrase queries; falls back to the search analyzer when unset. */
public NamedAnalyzer searchQuoteAnalyzer() {
    return searchQuoteAnalyzer == null ? searchAnalyzer : searchQuoteAnalyzer;
}

public void setSearchQuoteAnalyzer(NamedAnalyzer analyzer) {
    checkIfFrozen();
    this.searchQuoteAnalyzer = analyzer;
}

public SimilarityProvider similarity() {
    return similarity;
}

public void setSimilarity(SimilarityProvider similarity) {
    checkIfFrozen();
    this.similarity = similarity;
}

/** Returns the value that should be added when JSON null is found, or null if no value should be added */
public Object nullValue() {
    return nullValue;
}

/** Returns the null value stringified, so it can be used for e.g. _all field, or null if there is no null value */
public String nullValueAsString() {
    return nullValueAsString;
}

/** Sets the null value and initializes the string version */
public void setNullValue(Object nullValue) {
    checkIfFrozen();
    this.nullValue = nullValue;
    // cache the string form so callers don't re-stringify on every use
    this.nullValueAsString = nullValue == null ? null : nullValue.toString();
}

/** Returns the actual value of the field. */
public Object value(Object value) {
    return value;
}

/** Returns the value that will be used as a result for search. Can be only of specific types... */
public Object valueForSearch(Object value) {
    return value;
}

/** Returns the indexed value used to construct search "values". */
public BytesRef indexedValueForSearch(Object value) {
    return BytesRefs.toBytesRef(value);
}

/**
 * Should the field query {@link #termQuery(Object, org.elasticsearch.index.query.QueryShardContext)} be used when detecting this
 * field in query string.
 */
public boolean useTermQueryWithQueryString() {
    return false;
}

/** Creates a term associated with the field of this mapper for the given value */
protected Term createTerm(Object value) {
    return new Term(names().indexName(), indexedValueForSearch(value));
}

/** Builds an exact-match query for a single value on this field. */
public Query termQuery(Object value, @Nullable QueryShardContext context) {
    return new TermQuery(createTerm(value));
}
/**
 * Builds a query matching any of the given values, converting each value to
 * its indexed form via {@link #indexedValueForSearch(Object)}.
 * <p>
 * FIX: the parameter was the raw type {@code List}; {@code List<?>} is
 * source-compatible for callers and override-equivalent (same erasure) for
 * subclasses, and removes unchecked raw-type usage. Also uses the
 * {@code names()} accessor for consistency with the sibling query builders.
 *
 * @param values  the candidate values, in any form accepted by indexedValueForSearch
 * @param context the current shard context, may be null
 */
public Query termsQuery(List<?> values, @Nullable QueryShardContext context) {
    BytesRef[] bytesRefs = new BytesRef[values.size()];
    for (int i = 0; i < bytesRefs.length; i++) {
        bytesRefs[i] = indexedValueForSearch(values.get(i));
    }
    return new TermsQuery(names().indexName(), bytesRefs);
}
/** Builds a term-range query over this field's indexed values; null bounds are open-ended. */
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
    return new TermRangeQuery(names().indexName(),
        lowerTerm == null ? null : indexedValueForSearch(lowerTerm),
        upperTerm == null ? null : indexedValueForSearch(upperTerm),
        includeLower, includeUpper);
}

/** Builds a fuzzy query; the edit distance is derived from the fuzziness and the value's string form. */
public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
    return new FuzzyQuery(createTerm(value), fuzziness.asDistance(BytesRefs.toString(value)), prefixLength, maxExpansions, transpositions);
}

/** Builds a prefix query, applying the rewrite method when one is supplied. */
public Query prefixQuery(String value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) {
    PrefixQuery query = new PrefixQuery(createTerm(value));
    if (method != null) {
        query.setRewriteMethod(method);
    }
    return query;
}

/** Builds a regexp query; rejected for numeric fields, where regexes are meaningless. */
public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryShardContext context) {
    if (numericType() != null) {
        // NOTE(review): uses the raw 'names.fullName' field while sibling methods
        // go through names().fullName() — confirm this is intentional
        throw new QueryShardException(context, "Cannot use regular expression to filter numeric field [" + names.fullName + "]");
    }

    RegexpQuery query = new RegexpQuery(createTerm(value), flags, maxDeterminizedStates);
    if (method != null) {
        query.setRewriteMethod(method);
    }
    return query;
}
/**
 * Builds a constant-score query matching the configured null value, or
 * returns null when no null value is configured for this field.
 */
public Query nullValueQuery() {
    return nullValue == null ? null : new ConstantScoreQuery(termQuery(nullValue, null));
}
/**
 * @return a {@link FieldStats} instance that maps to the type of this field based on the provided {@link Terms} instance.
 */
public FieldStats stats(Terms terms, int maxDoc) throws IOException {
    // default implementation treats the field as text, reporting raw term statistics
    return new FieldStats.Text(
        maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), terms.getMin(), terms.getMax()
    );
}

/** A term query to use when parsing a query string. Can return <tt>null</tt>. */
@Nullable
public Query queryStringTermQuery(Term term) {
    return null;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.facebook;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Array;
import java.net.URLDecoder;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import facebook4j.Facebook;
import facebook4j.Reading;
import facebook4j.json.DataObjectFactory;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.component.facebook.data.FacebookMethodsType;
import org.apache.camel.component.facebook.data.FacebookMethodsTypeHelper.MatchType;
import org.apache.camel.component.facebook.data.FacebookPropertiesHelper;
import org.apache.camel.component.facebook.data.ReadingBuilder;
import org.apache.camel.support.ScheduledPollConsumer;
import static org.apache.camel.component.facebook.FacebookConstants.FACEBOOK_DATE_FORMAT;
import static org.apache.camel.component.facebook.FacebookConstants.READING_PREFIX;
import static org.apache.camel.component.facebook.FacebookConstants.READING_PROPERTY;
import static org.apache.camel.component.facebook.data.FacebookMethodsTypeHelper.filterMethods;
import static org.apache.camel.component.facebook.data.FacebookMethodsTypeHelper.getHighestPriorityMethod;
import static org.apache.camel.component.facebook.data.FacebookMethodsTypeHelper.getMissingProperties;
import static org.apache.camel.component.facebook.data.FacebookMethodsTypeHelper.invokeMethod;
/**
* The Facebook consumer.
*/
public class FacebookConsumer extends ScheduledPollConsumer {

    private static final String SINCE_PREFIX = "since=";

    private final FacebookEndpoint endpoint;
    // the consumer method, resolved once at construction time
    private final FacebookMethodsType method;
    // endpoint configuration frozen into an immutable property map
    private final Map<String, Object> endpointProperties;

    // rolling time-window bounds (formatted with FACEBOOK_DATE_FORMAT) used
    // to avoid reading duplicate results across polls
    private String sinceTime;
    private String untilTime;

    public FacebookConsumer(FacebookEndpoint endpoint, Processor processor) {
        super(endpoint, processor);
        this.endpoint = endpoint;

        // determine the consumer method to invoke
        this.method = findMethod();

        // get endpoint properties in a map
        final HashMap<String, Object> properties = new HashMap<>();
        FacebookPropertiesHelper.getEndpointProperties(endpoint.getConfiguration(), properties);

        // honor a user supplied reading.since value as the starting point;
        // any user supplied until value is overridden on every poll
        final Reading reading = (Reading) properties.get(READING_PROPERTY);
        if (reading != null) {
            final String queryString = reading.toString();
            if (queryString.contains(SINCE_PREFIX)) {
                // use the user supplied value to start with
                final int startIndex = queryString.indexOf(SINCE_PREFIX) + SINCE_PREFIX.length();
                int endIndex = queryString.indexOf('&', startIndex);
                if (endIndex == -1) {
                    // 'since' is the last parameter: ignore the closing square bracket
                    endIndex = queryString.length() - 1;
                }
                final String strSince = queryString.substring(startIndex, endIndex);
                try {
                    this.sinceTime = URLDecoder.decode(strSince, "UTF-8");
                } catch (UnsupportedEncodingException e) {
                    throw new RuntimeCamelException(String.format("Error decoding %s.since with value %s due to: %s", READING_PREFIX, strSince, e.getMessage()), e);
                }
                log.debug("Using supplied property {}since value {}", READING_PREFIX, this.sinceTime);
            }
            if (queryString.contains("until=")) {
                log.debug("Overriding configured property {}until", READING_PREFIX);
            }
        }
        this.endpointProperties = Collections.unmodifiableMap(properties);
    }

    @Override
    public boolean isGreedy() {
        // make this consumer not greedy to avoid making too many Facebook calls
        return false;
    }

    /**
     * Finds the Facebook method whose arguments best match the configured
     * endpoint properties plus the implicit reading property.
     *
     * @return the method to invoke on every poll
     * @throws IllegalArgumentException if no candidate method matches
     */
    private FacebookMethodsType findMethod() {
        FacebookMethodsType result;
        // find one that takes the largest subset of endpoint parameters
        final Set<String> argNames = new HashSet<>();
        argNames.addAll(FacebookPropertiesHelper.getEndpointPropertyNames(endpoint.getConfiguration()));

        // add reading property for polling, if it doesn't already exist!
        argNames.add(READING_PROPERTY);

        final String[] argNamesArray = argNames.toArray(new String[argNames.size()]);
        List<FacebookMethodsType> filteredMethods = filterMethods(
            endpoint.getCandidates(), MatchType.SUPER_SET, argNamesArray);
        if (filteredMethods.isEmpty()) {
            throw new IllegalArgumentException(
                String.format("Missing properties for %s, need one or more from %s",
                    endpoint.getMethod(),
                    getMissingProperties(endpoint.getMethod(), endpoint.getNameStyle(), argNames)));
        } else if (filteredMethods.size() == 1) {
            // single match
            result = filteredMethods.get(0);
        } else {
            result = getHighestPriorityMethod(filteredMethods);
            // FIX: log the chosen method ('result'); the 'method' field was logged
            // before, but it is still null here because findMethod() runs while
            // that field is being initialized in the constructor
            log.warn("Using highest priority method {} from methods {}", result, filteredMethods);
        }
        return result;
    }

    @Override
    protected int poll() throws Exception {
        // invoke the consumer method
        final Map<String, Object> args = getMethodArguments();
        try {
            // also check whether we need to get raw JSON
            String rawJSON = null;
            Object result;
            if (endpoint.getConfiguration().getJsonStoreEnabled() == null
                || !endpoint.getConfiguration().getJsonStoreEnabled()) {
                result = invokeMethod(endpoint.getConfiguration().getFacebook(),
                    method, args);
            } else {
                // synchronize on the client so the raw JSON fetched afterwards
                // belongs to this invocation's result
                final Facebook facebook = endpoint.getConfiguration().getFacebook();
                synchronized (facebook) {
                    result = invokeMethod(facebook, method, args);
                    rawJSON = DataObjectFactory.getRawJSON(result);
                }
            }

            // process result according to type
            if (result != null && (result instanceof Collection || result.getClass().isArray())) {
                // create an exchange for every element
                final Object array = getResultAsArray(result);
                final int length = Array.getLength(array);
                for (int i = 0; i < length; i++) {
                    processResult(Array.get(array, i), rawJSON);
                }
                return length;
            } else {
                processResult(result, rawJSON);
                return 1; // number of messages polled
            }
        } catch (Throwable t) {
            throw RuntimeCamelException.wrapRuntimeCamelException(t);
        }
    }

    /**
     * Wraps a single poll result in an exchange and hands it to the route.
     *
     * @param result  the poll result body
     * @param rawJSON the raw JSON for the result, or null when not requested
     */
    private void processResult(Object result, String rawJSON) throws Exception {
        Exchange exchange = endpoint.createExchange();
        exchange.getIn().setBody(result);
        if (rawJSON != null) {
            exchange.getIn().setHeader(FacebookConstants.RAW_JSON_HEADER, rawJSON);
        }
        try {
            // send message to next processor in the route
            getProcessor().process(exchange);
        } finally {
            // log exception if an exception occurred and was not handled
            if (exchange.getException() != null) {
                getExceptionHandler().handleException("Error processing exchange", exchange, exchange.getException());
            }
        }
    }

    /** Normalizes a Collection or array result into an Object array. */
    private Object getResultAsArray(Object result) {
        if (result.getClass().isArray()) {
            // no conversion needed
            return result;
        }
        // must be a Collection
        // TODO add support for Paging using ResponseList
        Collection<?> collection = (Collection<?>) result;
        return collection.toArray(new Object[collection.size()]);
    }

    /**
     * Builds the argument map for the next poll, setting the Reading since
     * and until fields to a window that starts where the last poll ended.
     */
    private Map<String, Object> getMethodArguments() {
        // start by setting the Reading since and until fields,
        // these are used to avoid reading duplicate results across polls
        Map<String, Object> arguments = new HashMap<>();
        arguments.putAll(endpointProperties);
        Reading reading = (Reading) arguments.remove(READING_PROPERTY);
        if (reading == null) {
            reading = new Reading();
        } else {
            try {
                reading = ReadingBuilder.copy(reading, true);
            } catch (NoSuchFieldException | IllegalAccessException e) {
                // multi-catch: both reflective failures get identical wrapping
                throw new IllegalArgumentException(String.format("Error creating property [%s]: %s",
                    READING_PROPERTY, e.getMessage()), e);
            }
        }

        // now set since and until for this poll
        final SimpleDateFormat dateFormat = new SimpleDateFormat(FACEBOOK_DATE_FORMAT);
        final long currentMillis = System.currentTimeMillis();
        if (this.sinceTime == null) {
            // first poll, set this to (current time - initial poll delay)
            final Date startTime = new Date(currentMillis
                - TimeUnit.MILLISECONDS.convert(getInitialDelay(), getTimeUnit()));
            this.sinceTime = dateFormat.format(startTime);
        } else if (this.untilTime != null) {
            // use the last 'until' time
            this.sinceTime = this.untilTime;
        }
        this.untilTime = dateFormat.format(new Date(currentMillis));

        reading.since(this.sinceTime);
        reading.until(this.untilTime);

        arguments.put(READING_PROPERTY, reading);
        return arguments;
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.lexruntimev2.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
 * Container for text that is returned to the customer.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/runtime.lex.v2-2020-08-07/Message" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class Message implements Serializable, Cloneable, StructuredPojo {

    /** The text of the message. */
    private String content;

    /** Indicates the type of response. */
    private String contentType;

    private ImageResponseCard imageResponseCard;

    /**
     * Sets the text of the message.
     *
     * @param content
     *        The text of the message.
     */
    public void setContent(String content) {
        this.content = content;
    }

    /**
     * Returns the text of the message.
     *
     * @return The text of the message.
     */
    public String getContent() {
        return this.content;
    }

    /**
     * Fluent setter for the text of the message.
     *
     * @param content
     *        The text of the message.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Message withContent(String content) {
        setContent(content);
        return this;
    }

    /**
     * Sets the type of the response.
     *
     * @param contentType
     *        Indicates the type of response.
     * @see MessageContentType
     */
    public void setContentType(String contentType) {
        this.contentType = contentType;
    }

    /**
     * Returns the type of the response.
     *
     * @return Indicates the type of response.
     * @see MessageContentType
     */
    public String getContentType() {
        return this.contentType;
    }

    /**
     * Fluent setter for the response type.
     *
     * @param contentType
     *        Indicates the type of response.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see MessageContentType
     */
    public Message withContentType(String contentType) {
        setContentType(contentType);
        return this;
    }

    /**
     * Fluent setter for the response type from its enum form; stores the
     * enum's string representation.
     *
     * @param contentType
     *        Indicates the type of response.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see MessageContentType
     */
    public Message withContentType(MessageContentType contentType) {
        this.contentType = contentType.toString();
        return this;
    }

    /**
     * @param imageResponseCard
     */
    public void setImageResponseCard(ImageResponseCard imageResponseCard) {
        this.imageResponseCard = imageResponseCard;
    }

    /**
     * @return the image response card, if one was set
     */
    public ImageResponseCard getImageResponseCard() {
        return this.imageResponseCard;
    }

    /**
     * @param imageResponseCard
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Message withImageResponseCard(ImageResponseCard imageResponseCard) {
        setImageResponseCard(imageResponseCard);
        return this;
    }

    /**
     * Returns a string representation of this object for testing and
     * debugging; the message content is replaced with a placeholder because
     * it may contain sensitive data.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("{");
        if (getContent() != null) {
            sb.append("Content: ").append("***Sensitive Data Redacted***").append(",");
        }
        if (getContentType() != null) {
            sb.append("ContentType: ").append(getContentType()).append(",");
        }
        if (getImageResponseCard() != null) {
            sb.append("ImageResponseCard: ").append(getImageResponseCard());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof Message)) {
            // also covers obj == null
            return false;
        }
        final Message other = (Message) obj;
        return java.util.Objects.equals(getContent(), other.getContent())
                && java.util.Objects.equals(getContentType(), other.getContentType())
                && java.util.Objects.equals(getImageResponseCard(), other.getImageResponseCard());
    }

    @Override
    public int hashCode() {
        // Objects.hash reproduces the classic prime-31 accumulation with null -> 0,
        // so the value is identical to the hand-rolled generated version.
        return java.util.Objects.hash(getContent(), getContentType(), getImageResponseCard());
    }

    @Override
    public Message clone() {
        try {
            return (Message) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.lexruntimev2.model.transform.MessageMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/**
* Copyright (C) 2004-2011 Jive Software. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.sparkplugin.ui.call;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.GradientPaint;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.image.BufferedImage;
import java.text.SimpleDateFormat;
import java.util.Date;
import javax.swing.BorderFactory;
import javax.swing.ImageIcon;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JSeparator;
import net.java.sipmack.sip.InterlocutorUI;
import net.java.sipmack.softphone.SoftPhoneManager;
import net.java.sipmack.softphone.SoftPhoneManager.CallRoomState;
import org.jivesoftware.spark.ChatManager;
import org.jivesoftware.spark.SparkManager;
import org.jivesoftware.spark.component.RolloverButton;
import org.jivesoftware.spark.plugin.phone.resource.PhoneRes;
import org.jivesoftware.spark.ui.ChatRoom;
import org.jivesoftware.spark.util.ModelUtil;
import org.jivesoftware.spark.util.SwingWorker;
import org.jivesoftware.spark.util.log.Log;
import org.jivesoftware.sparkplugin.components.RosterMemberCallButton;
import org.jivesoftware.sparkplugin.ui.PhonePad;
import org.jivesoftware.sparkplugin.ui.components.ControlPanel;
import org.jivesoftware.sparkplugin.ui.components.JavaMixer;
import org.jivesoftware.sparkplugin.ui.transfer.TransferManager;
/**
* The UI for calls with Roster members.
*
* @author Derek DeMoro
*/
public class RosterMemberPanel extends PhonePanel {
private static final long serialVersionUID = -327742794852188962L;

// shows the call-status text; initialized with CONNECTED in buildTopPanel()
private JLabel connectedLabel;

// presumably the remote party's number — set outside this view; TODO confirm
private String phoneNumber;
// displays the phone number in the top panel
private JLabel phoneLabel;

// panel listing previous conversations, shown at the bottom
private PreviousConversationPanel historyPanel;

// current toggle states of the call (managed outside this view)
private boolean onHold;
private boolean muted;

// call-control buttons built in buildControlPanel()
private RosterMemberCallButton muteButton;
private RosterMemberCallButton holdButton;
private RosterMemberCallButton transferButton;
private RolloverButton hangUpButton;

private SoftPhoneManager softPhone;

// NOTE(review): effectively a constant — consider 'static final'
private static String CONNECTED = PhoneRes.getIString("phone.connected");

private InterlocutorUI activeCall;
private CallManager callManager;

// status colors; usage not visible in this chunk — presumably for call-state display
private final Color greenColor = new Color(91, 175, 41);
private final Color orangeColor = new Color(229, 139, 11);

// NOTE(review): presumably set when the call ends via transfer; confirm with callers
private boolean callWasTransferred;

private JavaMixer javaMixer = new JavaMixer();
/**
 * Creates the panel and lays out its three sections: the information block,
 * the call-control strip, and the previous-conversation history.
 */
public RosterMemberPanel() {
    setLayout(new GridBagLayout());
    setBorder(BorderFactory.createLineBorder(Color.lightGray));

    callManager = CallManager.getInstance();

    // Initialize the softphone manager (mixer setup happens through it).
    softPhone = SoftPhoneManager.getInstance();

    // Top section: caller information.
    final JPanel infoPanel = buildTopPanel();
    add(infoPanel, new GridBagConstraints(1, 5, 1, 1, 0.0, 0.0, GridBagConstraints.WEST, GridBagConstraints.HORIZONTAL, new Insets(2, 2, 2, 2), 0, 0));

    // Middle section: mute/hold/transfer controls.
    final JPanel controls = buildControlPanel();
    add(controls, new GridBagConstraints(1, 6, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(2, 2, 2, 2), 0, 0));

    // Bottom section: previous conversation history.
    historyPanel = new PreviousConversationPanel();
    add(historyPanel, new GridBagConstraints(1, 8, 1, 1, 0.0, 1.0, GridBagConstraints.SOUTH, GridBagConstraints.BOTH, new Insets(2, 2, 2, 2), 0, 100));

    // Apply the default settings/state.
    setupDefaults();
}
/**
 * Builds the information block: the phone-number label, a dial-pad button,
 * and the centered connection-status label.
 *
 * @return the UI representing the Information Block.
 */
private JPanel buildTopPanel() {
    final JPanel infoPanel = new JPanel(new GridBagLayout());
    infoPanel.setOpaque(false);

    // Phone number label, anchored top-left.
    phoneLabel = new JLabel();
    phoneLabel.setFont(new Font("Arial", Font.BOLD, 13));
    phoneLabel.setForeground(new Color(64, 103, 162));
    infoPanel.add(phoneLabel, new GridBagConstraints(0, 0, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(0, 2, 2, 2), 0, 0));

    // Dial-pad button, anchored top-right; clicking pops up the pad.
    final RolloverButton dialPadButton = new RolloverButton(PhoneRes.getImageIcon("ICON_NUMBERPAD_IMAGE"));
    infoPanel.add(dialPadButton, new GridBagConstraints(1, 0, 1, 3, 1.0, 0.0, GridBagConstraints.NORTHEAST, GridBagConstraints.NONE, new Insets(0, 2, 2, 2), 0, 0));

    final PhonePad dialPad = new PhonePad();
    dialPadButton.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent event) {
            dialPad.showDialpad(dialPadButton, true);
        }
    });

    // Connection-status label, centered across both columns.
    connectedLabel = new JLabel(CONNECTED);
    connectedLabel.setFont(new Font("Arial", Font.BOLD, 13));
    connectedLabel.setHorizontalTextPosition(JLabel.CENTER);
    connectedLabel.setHorizontalAlignment(JLabel.CENTER);
    infoPanel.add(connectedLabel, new GridBagConstraints(0, 1, 2, 1, 1.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, new Insets(2, 2, 2, 2), 0, 0));

    return infoPanel;
}
/**
 * Builds the Control Panel: mixer volume sliders, the mute/hold/transfer
 * button column, and the "hang up" button, all laid out on a GridBagLayout.
 *
 * @return the control panel.
 */
private JPanel buildControlPanel() {
    // Add Control Panel
    final JPanel mainPanel = new JPanel(new GridBagLayout());
    mainPanel.setOpaque(false);
    // Initialize Mixer.
    // Add Input Volume To Control Panel
    // NOTE(review): this panel uses the SPEAKER icon but binds the *master* volume
    // control, while the next one uses the MICROPHONE icon with the *input* volume.
    // The "input"/"output" comments look swapped relative to the icons — confirm.
    try {
        final ControlPanel inputPanel = new ControlPanel(new GridBagLayout());
        final JLabel inputIcon = new JLabel(PhoneRes.getImageIcon("SPEAKER_IMAGE"));
        inputPanel.add(javaMixer.getPrefferedMasterVolume(), new GridBagConstraints(0, 0, 1, 1, 0.0, 1.0, GridBagConstraints.CENTER, GridBagConstraints.VERTICAL, new Insets(2, 2, 2, 2), 0, 0));
        inputPanel.add(inputIcon, new GridBagConstraints(0, 1, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.NONE, new Insets(2, 2, 2, 2), 0, 0));
        mainPanel.add(inputPanel, new GridBagConstraints(0, 0, 1, 1, 0.0, 0.2, GridBagConstraints.NORTHWEST, GridBagConstraints.VERTICAL, new Insets(2, 1, 2, 1), 0, 0));
    }
    catch (Exception e) {
        // Mixer may be unavailable on this platform; the panel simply omits the slider.
        Log.error(e);
    }
    // Add Output Volume To Control Panel
    try {
        final ControlPanel outputPanel = new ControlPanel(new GridBagLayout());
        final JLabel outputIcon = new JLabel(PhoneRes.getImageIcon("MICROPHONE_IMAGE"));
        outputPanel.add(javaMixer.getPrefferedInputVolume(), new GridBagConstraints(0, 0, 1, 1, 0.0, 1.0, GridBagConstraints.CENTER, GridBagConstraints.VERTICAL, new Insets(2, 2, 2, 2), 0, 0));
        outputPanel.add(outputIcon, new GridBagConstraints(0, 1, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.NONE, new Insets(2, 2, 2, 2), 0, 0));
        mainPanel.add(outputPanel, new GridBagConstraints(1, 0, 1, 1, 0.0, 0.2, GridBagConstraints.NORTHWEST, GridBagConstraints.VERTICAL, new Insets(2, 1, 2, 1), 0, 0));
    }
    catch (Exception e) {
        Log.error(e);
    }
    // Build ControlPanel List: mute / hold / transfer buttons stacked vertically
    // with thin separators between them.
    final ControlPanel controlPanel = new ControlPanel(new GridBagLayout());
    final JSeparator sep = new JSeparator(JSeparator.HORIZONTAL);
    sep.setBackground(new Color(219, 228, 238));
    muteButton = new RosterMemberCallButton(PhoneRes.getImageIcon("MUTE_IMAGE").getImage(), PhoneRes.getIString("phone.mute"));
    muteButton.setToolTipText(PhoneRes.getIString("phone.tips.mute"));
    controlPanel.add(muteButton, new GridBagConstraints(0, 0, 1, 1, 0.0, 1.0, GridBagConstraints.WEST, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0));
    controlPanel.add(sep, new GridBagConstraints(0, 1, 1, 1, 1.0, 0.0, GridBagConstraints.WEST, GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
    holdButton = new RosterMemberCallButton(PhoneRes.getImageIcon("ON_HOLD_IMAGE").getImage(), PhoneRes.getIString("phone.hold"));
    holdButton.setToolTipText(PhoneRes.getIString("phone.tips.hold"));
    controlPanel.add(holdButton, new GridBagConstraints(0, 2, 1, 1, 0.0, 1.0, GridBagConstraints.WEST, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0));
    final JSeparator sep2 = new JSeparator(JSeparator.HORIZONTAL);
    sep2.setBackground(new Color(219, 228, 238));
    controlPanel.add(sep2, new GridBagConstraints(0, 3, 1, 1, 1.0, 0.0, GridBagConstraints.WEST, GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
    transferButton = new RosterMemberCallButton(PhoneRes.getImageIcon("TRANSFER_IMAGE").getImage(), PhoneRes.getIString("phone.transfer"));
    transferButton.setToolTipText(PhoneRes.getIString("phone.tips.transfer"));
    controlPanel.add(transferButton, new GridBagConstraints(0, 4, 1, 1, 0.0, 1.0, GridBagConstraints.WEST, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0));
    // Add Components to Main Panel
    mainPanel.add(controlPanel, new GridBagConstraints(2, 0, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(2, 1, 2, 1), 0, 0));
    // Add End Call button (listener is attached later in setupDefaults()).
    hangUpButton = new RolloverButton(" "+PhoneRes.getIString("phone.hangup"), PhoneRes.getImageIcon("HANG_UP_PHONE_77x24_IMAGE"));
    hangUpButton.setHorizontalTextPosition(JLabel.CENTER);
    hangUpButton.setFont(new Font("Dialog", Font.BOLD, 11));
    hangUpButton.setForeground(new Color(153, 32, 10));
    hangUpButton.setMargin(new Insets(0, 0, 0, 0));
    mainPanel.add(hangUpButton, new GridBagConstraints(0, 1, 3, 1, 0.0, 0.8, GridBagConstraints.NORTH, GridBagConstraints.NONE, new Insets(2, 2, 2, 2), 0, 0));
    return mainPanel;
}
/**
 * Binds this panel to a new call, resetting the per-call mute/hold state
 * and updating the displayed phone number.
 *
 * @param interlocutorUI the UI model of the call this panel now represents.
 */
public void setInterlocutorUI(final InterlocutorUI interlocutorUI) {
    // Fresh call: clear the toggle state from any previous call first.
    muted = false;
    onHold = false;
    this.activeCall = interlocutorUI;
    this.phoneNumber = interlocutorUI.getCall().getNumber();
    phoneLabel.setText(phoneNumber);
    callStarted();
}
/**
 * Wires the default listeners onto the call-control buttons:
 * hold and mute toggle their respective states, transfer opens the
 * TransferManager dialog, and hang-up terminates the active call.
 */
public void setupDefaults() {
    holdButton.addMouseListener(new MouseAdapter() {
        public void mousePressed(MouseEvent mouseEvent) {
            toggleHold();
        }
    });
    muteButton.addMouseListener(new MouseAdapter() {
        public void mousePressed(MouseEvent mouseEvent) {
            toggleMute();
        }
    });
    transferButton.addMouseListener(new MouseAdapter() {
        public void mousePressed(MouseEvent mouseEvent) {
            // Ask the user for a transfer target; getNumber() blocks on a dialog.
            TransferManager ui = new TransferManager();
            final String number = ui.getNumber(SparkManager.getChatManager().getChatContainer().getChatFrame());
            if (ModelUtil.hasLength(number)) {
                setStatus("Transferring...", blueColor);
                historyPanel.transferring();
                // Show "Transferring..." for ~2 seconds off the EDT, then perform
                // the actual transfer and tear the call panel down.
                SwingWorker transferringThread = new SwingWorker() {
                    public Object construct() {
                        try {
                            Thread.sleep(2000);
                        }
                        catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                        return true;
                    }
                    public void finished() {
                        // Set callWasTransferred BEFORE callEnded() so callEnded()
                        // skips the "Call Ended" status for transferred calls.
                        setStatus("Transferred", blueColor);
                        historyPanel.transfer(number);
                        callWasTransferred = true;
                        softPhone.handleTransfer(getActiveCall().getID(), number);
                        callEnded();
                    }
                };
                transferringThread.start();
            }
        }
    });
    final SoftPhoneManager manager = SoftPhoneManager.getInstance();
    hangUpButton.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent actionEvent) {
            // Disable immediately so the user can't hang up twice.
            manager.getDefaultGuiManager().hangup(activeCall);
            hangUpButton.setEnabled(false);
        }
    });
}
/**
 * Called when a new call is established: reloads the conversation history,
 * enables the call controls, and posts a timestamped notification into the
 * associated chat room (when one exists).
 */
private void callStarted() {
    // Replace any stale history with the conversations for this number.
    historyPanel.removeAll();
    historyPanel.addPreviousConversations(phoneNumber);
    // All controls become usable once the call is live.
    hangUpButton.setEnabled(true);
    muteButton.setEnabled(true);
    holdButton.setEnabled(true);
    transferButton.setEnabled(true);
    setStatus(CONNECTED, false);
    // Add notification to ChatRoom if one exists.
    final ChatRoom room = callManager.getAssociatedChatRoom(this);
    if (room != null) {
        final String startedAt = new SimpleDateFormat("h:mm a").format(new Date());
        room.getTranscriptWindow().insertNotificationMessage(
                PhoneRes.getIString("phone.callstartedat") + " " + startedAt,
                ChatManager.NOTIFICATION_COLOR);
    }
}
/**
 * Called when the call is ended. This does basic container cleanup:
 * disables the controls, updates the status label, notifies the
 * associated chat room, and flags the call session as ended.
 */
public void callEnded() {
    // A transferred call keeps the "Transferred" status set by the transfer
    // worker; only non-transferred calls show "Call Ended".
    // (Fix: the old code also called setStatus("Call Ended", ...) a second
    // time, unconditionally, which clobbered "Transferred" and made this
    // guard pointless.)
    if (!callWasTransferred) {
        historyPanel.callEnded();
        setStatus("Call Ended", redColor);
    }
    // Disable all call controls; the panel no longer drives an active call.
    hangUpButton.setEnabled(false);
    hangUpButton.setOpaque(false);
    muteButton.setEnabled(false);
    muteButton.setOpaque(false);
    holdButton.setEnabled(false);
    holdButton.setOpaque(false);
    transferButton.setEnabled(false);
    transferButton.setOpaque(false); // consistency: siblings also clear opacity
    // Add notification to ChatRoom if one exists.
    // (Fix: look the room up once instead of twice.)
    final ChatRoom room = callManager.getAssociatedChatRoom(this);
    if (room != null) {
        final SimpleDateFormat formatter = new SimpleDateFormat("h:mm a");
        String time = formatter.format(new Date());
        room.getTranscriptWindow().insertNotificationMessage(PhoneRes.getIString("phone.callendedat") + " " + time, ChatManager.NOTIFICATION_COLOR);
    }
    // If this is a standalone phone call with no associated ChatRoom
    // gray out title and show off-phone icon.
    softPhone.addCallSession(room, SoftPhoneManager.CallRoomState.callWasEnded);
    // Notify tab handlers so the UI reflects the ended state.
    SparkManager.getChatManager().notifySparkTabHandlers(room);
}
/**
 * Updates the connection-status label.
 *
 * @param status the text to display.
 * @param alert  true renders in the alert (orange) colour, false in the
 *               normal connected (green) colour.
 */
private void setStatus(String status, boolean alert) {
    connectedLabel.setForeground(alert ? orangeColor : greenColor);
    connectedLabel.setText(status);
}
/**
 * Updates the connection-status label with an explicit colour.
 *
 * @param status the text to display.
 * @param color  the foreground colour for the label.
 */
private void setStatus(String status, Color color) {
    connectedLabel.setForeground(color);
    connectedLabel.setText(status);
}
/**
 * Toggles the mute state of the active call. Mute and hold are treated as
 * mutually exclusive: entering mute first releases hold.
 */
private void toggleMute() {
    // Leave hold first — only one of mute/hold may be active at a time.
    if (onHold) {
        toggleHold();
    }
    if (muted) {
        // Un-mute: restore the normal connected state.
        muted = false;
        // NOTE(review): tooltip text is hard-coded English here, unlike the
        // PhoneRes.getIString(...) keys used elsewhere — consider localizing.
        muteButton.setToolTipText("Mute");
        muteButton.setButtonSelected(false);
        setStatus(CONNECTED, false);
        // Change the current state.
        changeState(CallRoomState.inCall);
    }
    else {
        muted = true;
        muteButton.setToolTipText("Unmute");
        muteButton.setButtonSelected(true);
        setStatus("Muted", true);
        // Change the current state
        changeState(CallRoomState.muted);
    }
    muteButton.invalidate();
    muteButton.validate();
    muteButton.repaint();
    // NOTE(review): the second argument is !muted, i.e. false right after we
    // just muted. Confirm the GuiManager API expects "audio enabled" rather
    // than "mute on" — otherwise this is inverted.
    softPhone.getDefaultGuiManager().mute(activeCall, !muted);
}
/**
 * Toggles the hold state of the active call. Hold and mute are treated as
 * mutually exclusive: entering hold first releases mute.
 */
private void toggleHold() {
    // Leave mute first — only one of mute/hold may be active at a time.
    if (muted) {
        toggleMute();
    }
    if (onHold) {
        onHold = false;
        // NOTE(review): tooltip text is hard-coded English, unlike the
        // PhoneRes keys used elsewhere — consider localizing.
        holdButton.setToolTipText("Hold");
        holdButton.setButtonSelected(false);
        setStatus(CONNECTED, false);
        // Change the current state
        changeState(CallRoomState.inCall);
    }
    else {
        onHold = true;
        holdButton.setToolTipText("Unhold");
        holdButton.setButtonSelected(true);
        setStatus("On Hold", true);
        // Change the current state
        changeState(CallRoomState.onHold);
    }
    // hold() takes no flag, so presumably the stack toggles hold itself —
    // TODO confirm against the GuiManager API.
    softPhone.getDefaultGuiManager().hold(activeCall);
}
public void actionPerformed(ActionEvent e) {
    // Intentionally empty: concrete button actions are handled by the
    // listeners registered in setupDefaults().
}
/** @return the tab title — the remote party's phone number. */
public String getTabTitle() {
    return phoneNumber;
}
/** @return the localized frame title, e.g. "On the phone with &lt;number&gt;". */
public String getFrameTitle() {
    return PhoneRes.getIString("phone.onphonewith")+" " + phoneNumber;
}
/** @return the phone-receiver icon shown on this panel's tab. */
public ImageIcon getTabIcon() {
    return PhoneRes.getImageIcon("RECEIVER2_IMAGE");
}
/** @return this panel itself as the component to embed. */
public JComponent getGUI() {
    return this;
}
/** @return the tooltip text — the remote party's phone number. */
public String getToolTipDescription() {
    return phoneNumber;
}
/** @return true — this panel never vetoes being closed. */
public boolean closing() {
    return true;
}
/** @return the phone number of the call shown in this panel. */
public String getPhoneNumber() {
    return phoneNumber;
}
/**
 * Paints a subtle vertical gradient background. The gradient is rendered
 * into a 2-pixel-wide buffered image and then stretched across the full
 * width when drawn, keeping the gradient computation cheap.
 */
public void paintComponent(Graphics g) {
    // NOTE(review): a fresh BufferedImage is allocated on every repaint;
    // caching it (and invalidating on resize) would avoid per-paint
    // allocation — confirm before changing paint behavior.
    BufferedImage cache = new BufferedImage(2, getHeight(), BufferedImage.TYPE_INT_RGB);
    Graphics2D g2d = cache.createGraphics();
    GradientPaint paint = new GradientPaint(0, 0, new Color(241, 245, 250), 0, getHeight(), new Color(244, 250, 255), true);
    g2d.setPaint(paint);
    g2d.fillRect(0, 0, getWidth(), getHeight());
    g2d.dispose();
    // Stretch the 2px-wide gradient strip over the whole component.
    g.drawImage(cache, 0, 0, getWidth(), getHeight(), null);
}
/**
 * Reports a preferred width of zero so the enclosing layout is free to
 * size this panel horizontally; the preferred height is kept.
 */
public Dimension getPreferredSize() {
    final Dimension size = super.getPreferredSize();
    size.width = 0;
    return size;
}
/** @return the UI model of the call currently bound to this panel. */
public InterlocutorUI getActiveCall() {
    return activeCall;
}
/**
 * Records the new call-room state for the chat room associated with this
 * panel and asks the tab handlers to refresh accordingly.
 *
 * @param state the new state of the call session.
 */
private void changeState(SoftPhoneManager.CallRoomState state) {
    final ChatRoom chatRoom = callManager.getAssociatedChatRoom(this);
    softPhone.addCallSession(chatRoom, state);
    SparkManager.getChatManager().notifySparkTabHandlers(chatRoom);
}
}
| |
/*
* Copyright 2014-2017 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.metrics.core.jobs;
import static java.util.Arrays.asList;
import static org.hawkular.metrics.core.jobs.TempDataCompressor.JOB_NAME;
import static org.hawkular.metrics.model.MetricType.AVAILABILITY;
import static org.hawkular.metrics.model.MetricType.COUNTER;
import static org.hawkular.metrics.model.MetricType.GAUGE;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertNull;
import static org.testng.AssertJUnit.assertTrue;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.hawkular.metrics.core.service.BaseITest;
import org.hawkular.metrics.core.service.DataAccess;
import org.hawkular.metrics.core.service.MetricsServiceImpl;
import org.hawkular.metrics.core.service.Order;
import org.hawkular.metrics.core.service.TestDataAccessFactory;
import org.hawkular.metrics.core.service.transformers.DataPointDecompressTransformer;
import org.hawkular.metrics.datetime.DateTimeService;
import org.hawkular.metrics.model.AvailabilityType;
import org.hawkular.metrics.model.DataPoint;
import org.hawkular.metrics.model.Metric;
import org.hawkular.metrics.model.MetricId;
import org.hawkular.metrics.model.MetricType;
import org.hawkular.metrics.model.Tenant;
import org.hawkular.metrics.scheduler.api.JobDetails;
import org.hawkular.metrics.scheduler.impl.TestScheduler;
import org.hawkular.metrics.sysconfig.ConfigurationService;
import org.jboss.logging.Logger;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Duration;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.Row;
import com.google.common.collect.ImmutableMap;
import rx.Observable;
import rx.observers.TestSubscriber;
/**
 * Test the compression ETL jobs.
 *
 * @author Michael Burman
 */
public class CompressDataJobITest extends BaseITest {
    private static Logger logger = Logger.getLogger(CompressDataJobITest.class);
    private static AtomicInteger tenantCounter = new AtomicInteger();
    private MetricsServiceImpl metricsService;
    private DataAccess dataAccess;
    private JobsServiceImpl jobsService;
    private ConfigurationService configurationService;
    private TestScheduler jobScheduler;
    private PreparedStatement resetConfig;
    private PreparedStatement resetConfig2;
    // True until the first test has run; initTest() skips the extra time advance
    // on the very first execution.
    private boolean firstExecute = true;
    private JobDetails compressionJob;

    /**
     * Resets the persisted job configuration, boots the metrics service and test
     * scheduler, then drives the scheduler until the temp-table creation job and
     * the first compression run have both completed.
     */
    @BeforeClass
    public void initClass() {
        dataAccess = TestDataAccessFactory.newInstance(session);
        // Clear any persisted job config so both jobs start from a clean slate.
        resetConfig = session.prepare("DELETE FROM sys_config WHERE config_id = 'org.hawkular.metrics.jobs." +
                JOB_NAME + "'");
        resetConfig2 = session.prepare("DELETE FROM sys_config WHERE config_id = 'org.hawkular.metrics.jobs." +
                TempTableCreator.JOB_NAME + "'");
        session.execute(resetConfig.bind());
        session.execute(resetConfig2.bind());
        configurationService = new ConfigurationService();
        configurationService.init(rxSession);
        metricsService = new MetricsServiceImpl();
        metricsService.setDataAccess(dataAccess);
        metricsService.setConfigurationService(configurationService);
        metricsService.startUp(session, getKeyspace(), true, metricRegistry);
        jobScheduler = new TestScheduler(rxSession);
        jobScheduler.truncateTables(getKeyspace());
        jobsService = new JobsServiceImpl();
        jobsService.setSession(rxSession);
        jobsService.setScheduler(jobScheduler);
        jobsService.setMetricsService(metricsService);
        jobsService.setConfigurationService(configurationService);
        List<JobDetails> jobDetails = jobsService.start();
        JobDetails tableCreator =
                jobDetails.stream().filter(d -> d.getJobName().equalsIgnoreCase(TempTableCreator.JOB_NAME))
                        .findFirst().get();
        // Advance virtual time until the table-creator job has finished.
        CountDownLatch latch = new CountDownLatch(1);
        jobScheduler.onJobFinished(details -> {
            if(details.getJobName().equals(TempTableCreator.JOB_NAME)) {
                latch.countDown();
            }
        });
        jobScheduler.advanceTimeTo(tableCreator.getTrigger().getTriggerTime());
        jobScheduler.advanceTimeBy(1);
        try {
            assertTrue(latch.await(25, TimeUnit.SECONDS)); // Wait for tables to be ready
            Thread.sleep(3000); // Wait for the prepared statements to be initialized even in Travis
        } catch (InterruptedException e) {
            // Restore the interrupt flag and fail with the cause instead of a
            // bare assertTrue(false), which loses both.
            Thread.currentThread().interrupt();
            throw new AssertionError("Interrupted while waiting for the temp tables", e);
        }
        compressionJob = jobDetails
                .stream()
                .filter(details -> details.getJobName().equals(JOB_NAME))
                .findFirst().get();
        // The compression job fires at the start of the next odd hour.
        long nextStart = LocalDateTime.ofInstant(Instant.ofEpochMilli(jobScheduler.now()), ZoneOffset.UTC)
                .with(DateTimeService.startOfNextOddHour())
                .toInstant(ZoneOffset.UTC).toEpochMilli();
        CountDownLatch latch2 = new CountDownLatch(1);
        jobScheduler.onJobFinished(details -> {
            if(details.getJobName().equals(JOB_NAME)) {
                latch2.countDown();
            }
        });
        jobScheduler.advanceTimeTo(nextStart);
        jobScheduler.advanceTimeBy(1);
        assertNotNull(compressionJob);
        try {
            // BUG FIX: this previously awaited on the first latch, which had
            // already counted down, making the wait a no-op. We must wait on
            // latch2 for the first compression run to actually finish.
            assertTrue(latch2.await(25, TimeUnit.SECONDS)); // Wait for first compression to pass
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new AssertionError("Interrupted while waiting for the first compression run", e);
        }
    }

    /** Advances virtual time between tests (except before the very first one). */
    @BeforeMethod
    public void initTest(Method method) {
        logger.debug("Starting [" + method.getName() + "]");
        if(!firstExecute) {
            jobScheduler.advanceTimeBy(120);
        }
    }

    /** No per-test cleanup required; state is advanced, not reset. */
    @AfterMethod(alwaysRun = true)
    public void tearDown() {
    }

    /**
     * Writes gauge data into the previous two-hour block, runs the compression
     * job, and verifies exactly one compressed row (with a value, no tags) is
     * produced.
     */
    @Test(priority = 1)
    public void testCompressJob() throws Exception {
        long now = jobScheduler.now();
        // Data lands 30 minutes into the previous 2h compression block.
        DateTime start = DateTimeService.getTimeSlice(new DateTime(now, DateTimeZone.UTC).minusHours(2),
                Duration.standardHours(2)).plusMinutes(30);
        DateTime end = start.plusMinutes(20);
        String tenantId = nextTenantId() + now;
        MetricId<Double> mId = new MetricId<>(tenantId, GAUGE, "m1");
        doAction(() -> metricsService.createTenant(new Tenant(tenantId), false));
        Metric<Double> m1 = new Metric<>(mId, asList(
                new DataPoint<>(start.getMillis(), 1.1),
                new DataPoint<>(start.plusMinutes(2).getMillis(), 2.2),
                new DataPoint<>(start.plusMinutes(4).getMillis(), 3.3),
                new DataPoint<>(end.getMillis(), 4.4)));
        doAction(() -> metricsService.addDataPoints(GAUGE, Observable.just(m1)));
        CountDownLatch latch = new CountDownLatch(1);
        jobScheduler.onJobFinished(jobDetails -> {
            if(jobDetails.getJobName().equals(JOB_NAME)) {
                latch.countDown();
            }
        });
        jobScheduler.advanceTimeTo(compressionJob.getTrigger().getTriggerTime());
        jobScheduler.advanceTimeBy(1);
        assertTrue(latch.await(25, TimeUnit.SECONDS));
        long startSlice = DateTimeService.getTimeSlice(start.getMillis(), Duration.standardHours(2));
        long endSlice = DateTimeService.getTimeSlice(jobScheduler.now(), Duration.standardHours(2));
        Observable<Row> compressedRows = dataAccess.findCompressedData(mId, startSlice, endSlice, 0, Order.ASC);
        TestSubscriber<Row> testSubscriber = new TestSubscriber<>();
        compressedRows.subscribe(testSubscriber);
        testSubscriber.awaitTerminalEvent(5, TimeUnit.SECONDS);
        testSubscriber.assertNoErrors();
        testSubscriber.assertCompleted();
        List<Row> rows = testSubscriber.getOnNextEvents();
        assertEquals(1, rows.size());
        ByteBuffer c_value = rows.get(0).getBytes("c_value");
        ByteBuffer tags = rows.get(0).getBytes("tags");
        assertNotNull(c_value);
        assertNull(tags);
        firstExecute = false;
    }

    /**
     * Shared verification helper: stores the metric's points (if any), runs one
     * compression cycle, then decompresses the stored block and asserts it
     * round-trips to the original data points.
     */
    private <T> void testCompressResults(MetricType<T> type, Metric<T> metric, DateTime start) throws
            Exception {
        if (metric.getDataPoints() != null && !metric.getDataPoints().isEmpty()) {
            doAction(() -> metricsService.addDataPoints(type, Observable.just(metric)));
        }
        CountDownLatch latch = new CountDownLatch(1);
        jobScheduler.onJobFinished(jobDetails -> {
            if(jobDetails.getJobName().equals(JOB_NAME)) {
                latch.countDown();
            }
        });
        jobScheduler.advanceTimeBy(1);
        assertTrue(latch.await(25, TimeUnit.SECONDS));
        long startSlice = DateTimeService.getTimeSlice(start.getMillis(), Duration.standardHours(2));
        long endSlice = DateTimeService.getTimeSlice(start.plusHours(1).plusMinutes(59).getMillis(), Duration
                .standardHours(2));
        DataPointDecompressTransformer<T> decompressor = new DataPointDecompressTransformer<>(type, Order.ASC, 0, start
                .getMillis(), start.plusMinutes(30).getMillis());
        Observable<DataPoint<T>> dataPoints = dataAccess.findCompressedData(metric.getMetricId(), startSlice, endSlice,
                0, Order.ASC).compose(decompressor);
        TestSubscriber<DataPoint<T>> pointTestSubscriber = new TestSubscriber<>();
        dataPoints.subscribe(pointTestSubscriber);
        pointTestSubscriber.awaitTerminalEvent(5, TimeUnit.SECONDS);
        pointTestSubscriber.assertCompleted();
        List<DataPoint<T>> compressedPoints = pointTestSubscriber.getOnNextEvents();
        assertEquals(metric.getDataPoints(), compressedPoints);
    }

    /** Gauge data round-trips through compression unchanged. */
    @Test(dependsOnMethods={"testCompressJob"})
    public void testGaugeCompress() throws Exception {
        long now = jobScheduler.now();
        DateTime start = DateTimeService.getTimeSlice(new DateTime(now, DateTimeZone.UTC).minusHours(2),
                Duration.standardHours(2)).plusMinutes(30);
        DateTime end = start.plusMinutes(20);
        String tenantId = nextTenantId() + now;
        MetricId<Double> mId = new MetricId<>(tenantId, GAUGE, "m1");
        doAction(() -> metricsService.createTenant(new Tenant(tenantId), false));
        Metric<Double> m1 = new Metric<>(mId, asList(
                new DataPoint<>(start.getMillis(), 1.1),
                new DataPoint<>(start.plusMinutes(2).getMillis(), 2.2),
                new DataPoint<>(start.plusMinutes(4).getMillis(), 3.3),
                new DataPoint<>(end.getMillis(), 4.4)));
        testCompressResults(GAUGE, m1, start);
    }

    /** Counter data round-trips through compression unchanged. */
    @Test(dependsOnMethods={"testCompressJob"})
    public void testCounterCompress() throws Exception {
        long now = jobScheduler.now();
        DateTime start = DateTimeService.getTimeSlice(new DateTime(now, DateTimeZone.UTC).minusHours(2),
                Duration.standardHours(2)).plusMinutes(30);
        DateTime end = start.plusMinutes(20);
        String tenantId = nextTenantId() + now;
        MetricId<Long> mId = new MetricId<>(tenantId, COUNTER, "m2");
        doAction(() -> metricsService.createTenant(new Tenant(tenantId), false));
        Metric<Long> m1 = new Metric<>(mId, asList(
                new DataPoint<>(start.getMillis(), 1L),
                new DataPoint<>(start.plusMinutes(2).getMillis(), 2L),
                new DataPoint<>(start.plusMinutes(4).getMillis(), 3L),
                new DataPoint<>(end.getMillis(), 4L)));
        testCompressResults(COUNTER, m1, start);
    }

    /** Availability data round-trips through compression unchanged. */
    @Test(dependsOnMethods={"testCompressJob"})
    public void testAvailabilityCompress() throws Exception {
        long now = jobScheduler.now(); // I need to advance the triggerTime of compression job also
        DateTime start = DateTimeService.getTimeSlice(new DateTime(now, DateTimeZone.UTC).minusHours(2),
                Duration.standardHours(2)).plusMinutes(30);
        DateTime end = start.plusMinutes(20);
        String tenantId = nextTenantId() + now;
        MetricId<AvailabilityType> mId = new MetricId<>(tenantId, AVAILABILITY, "m3");
        doAction(() -> metricsService.createTenant(new Tenant(tenantId), false));
        Metric<AvailabilityType> m1 = new Metric<>(mId, asList(
                new DataPoint<>(start.getMillis(), AvailabilityType.UP),
                new DataPoint<>(start.plusMinutes(2).getMillis(), AvailabilityType.DOWN),
                new DataPoint<>(start.plusMinutes(4).getMillis(), AvailabilityType.DOWN),
                new DataPoint<>(end.getMillis(), AvailabilityType.UP)));
        testCompressResults(AVAILABILITY, m1, start);
    }

    /** Per-point tags survive the compression round-trip. */
    @Test(dependsOnMethods={"testCompressJob"})
    public void testGaugeWithTags() throws Exception {
        long now = jobScheduler.now();
        DateTime start = DateTimeService.getTimeSlice(new DateTime(now, DateTimeZone.UTC).minusHours(2),
                Duration.standardHours(2)).plusMinutes(30);
        DateTime end = start.plusMinutes(20);
        String tenantId = nextTenantId() + now;
        MetricId<Double> mId = new MetricId<>(tenantId, GAUGE, "m1");
        doAction(() -> metricsService.createTenant(new Tenant(tenantId), false));
        Metric<Double> m1 = new Metric<>(mId, asList(
                new DataPoint<>(start.getMillis(), 1.1, ImmutableMap.of("a", "b")),
                new DataPoint<>(start.plusMinutes(2).getMillis(), 2.2),
                new DataPoint<>(start.plusMinutes(4).getMillis(), 3.3, ImmutableMap.of("d", "e")),
                new DataPoint<>(end.getMillis(), 4.4)));
        testCompressResults(GAUGE, m1, start);
    }

    /**
     * Compression creates an expiration-index entry only for metrics that have
     * data points; a metric whose index entry was deleted and that has no data
     * must not get one back.
     */
    @Test(dependsOnMethods={"testCompressJob"})
    public void testCompressRetentionIndex() throws Exception {
        long now = jobScheduler.now();
        DateTime start = DateTimeService.getTimeSlice(new DateTime(now, DateTimeZone.UTC).minusHours(2),
                Duration.standardHours(2)).plusMinutes(30);
        DateTime end = start.plusMinutes(20);
        String tenantId = nextTenantId() + now;
        doAction(() -> metricsService.createTenant(new Tenant(tenantId), false));
        //create a metric definition but delete the expiration index entry
        //to ensure that an expiration entry is not created without data points
        MetricId<Double> mId2 = new MetricId<>(tenantId, GAUGE, "m2");
        Metric<Double> m2 = new Metric<>(mId2);
        doAction(() -> metricsService.createMetric(m2, true));
        doAction(() -> dataAccess.deleteFromMetricExpirationIndex(m2.getMetricId()));
        MetricId<Double> mId = new MetricId<>(tenantId, GAUGE, "m1");
        Metric<Double> m1 = new Metric<>(mId, asList(
                new DataPoint<>(start.getMillis(), 1.1),
                new DataPoint<>(start.plusMinutes(2).getMillis(), 2.2),
                new DataPoint<>(start.plusMinutes(4).getMillis(), 3.3),
                new DataPoint<>(end.getMillis(), 4.4)));
        testCompressResults(GAUGE, m1, start);
        assertNotNull(dataAccess.findMetricExpiration(m1.getMetricId()).toBlocking().firstOrDefault(null));
        assertNull(dataAccess.findMetricExpiration(m2.getMetricId()).toBlocking().firstOrDefault(null));
    }

    /** @return a fresh, unique tenant id for test isolation. */
    private String nextTenantId() {
        return "T" + tenantCounter.getAndIncrement();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.lang;
import java.io.IOException;
import java.net.ConnectException;
import java.util.Arrays;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.internal.util.typedef.X;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.testframework.junits.common.GridCommonTest;
import org.junit.Test;
/**
 * Tests for {@link X}.
 */
@GridCommonTest(group = "Lang")
public class GridXSelfTest extends GridCommonAbstractTest {
    /**
     * Verifies cause lookup through an exception chain, both via
     * {@link X#hasCause}/{@link X#cause} and via IgniteCheckedException itself.
     */
    @Test
    public void testHasCause() {
        ConnectException conEx = new ConnectException();
        IOException ioEx = new IOException(conEx);
        IgniteCheckedException gridEx = new IgniteCheckedException(ioEx);
        // hasCause matches if ANY of the given classes appears in the chain.
        assert X.hasCause(gridEx, IOException.class, NumberFormatException.class);
        assert !X.hasCause(gridEx, NumberFormatException.class);
        assert X.cause(gridEx, IOException.class) == ioEx;
        assert X.cause(gridEx, ConnectException.class) == conEx;
        assert X.cause(gridEx, NumberFormatException.class) == null;
        assert gridEx.getCause(IOException.class) == ioEx;
        assert gridEx.getCause(ConnectException.class) == conEx;
        assert gridEx.getCause(NumberFormatException.class) == null;
    }

    /**
     * Tests string presentation of given time.
     */
    @Test
    public void testTimeSpan() {
        // Fix: assertEquals takes (expected, actual); the arguments were
        // swapped, which produces misleading failure messages.
        assertEquals("1 day, 00:00:00.001", X.timeSpan2DHMSM(86400001L));
        assertEquals("2 days, 00:00:00.004", X.timeSpan2DHMSM(172800004L));
        assertEquals("00:00:00.001", X.timeSpan2DHMSM(1L));
        // timeSpan2HMSM drops whole days, keeping only the HH:mm:ss.SSS part.
        assertEquals("00:00:00.004", X.timeSpan2HMSM(172800004L));
    }

    /**
     * Shallow clone: non-cloneable objects are returned as-is, Cloneable
     * objects and arrays are copied one level deep.
     */
    @Test
    public void testShallowClone() {
        // Single not cloneable object
        Object obj = new Object();
        Object objClone = X.cloneObject(obj, false, true);
        assert objClone == obj;
        // Single cloneable object
        TestCloneable cloneable = new TestCloneable("Some string value");
        TestCloneable cloneableClone = X.cloneObject(cloneable, false, true);
        assert cloneableClone != null;
        assert cloneableClone != cloneable;
        assert cloneable.field.equals(cloneableClone.field);
        // Integer array.
        int[] intArr = {1, 2, 3};
        int[] intArrClone = X.cloneObject(intArr, false, true);
        assert intArrClone != null;
        assert intArrClone != intArr;
        assert Arrays.equals(intArrClone, intArr);
        // Boolean array.
        boolean[] boolArr = {true, false, true};
        boolean[] boolArrClone = X.cloneObject(boolArr, false, true);
        assert boolArrClone != null;
        assert boolArrClone != boolArr;
        assert Arrays.equals(boolArrClone, boolArr);
        // String array.
        String[] strArr = {"str1", "str2", "str3"};
        String[] strArrClone = X.cloneObject(strArr, false, true);
        assert strArrClone != null;
        assert strArrClone != strArr;
        assert Arrays.equals(strArrClone, strArr);
    }

    /**
     * Deep clone: every reachable object is copied, reference cycles are
     * preserved, and Cloneable can optionally be honored.
     */
    @SuppressWarnings({"StringEquality"})
    @Test
    public void testDeepCloner() {
        // Single not cloneable object
        Object obj = new Object();
        Object objClone = X.cloneObject(obj, true, true);
        assert objClone != null;
        assert objClone != obj;
        // Single cloneable object
        TestCloneable cloneable = new TestCloneable("Some string value");
        TestCloneable cloneableClone = X.cloneObject(cloneable, true, false);
        assert cloneableClone != null;
        assert cloneableClone != cloneable;
        assert cloneable.field.equals(cloneableClone.field);
        assert cloneable.field != cloneableClone.field;
        // Single non-cloneable object
        TestCloneable1 cloneable1 = new TestCloneable1("Some string value");
        TestCloneable1 cloneableClone1 = X.cloneObject(cloneable1, true, false);
        assert cloneableClone1 != null;
        assert cloneableClone1 != cloneable1;
        assert cloneable1.field.equals(cloneableClone1.field);
        assert cloneable1.field != cloneableClone1.field;
        // Integer array.
        int[] intArr = {1, 2, 3};
        int[] intArrClone = X.cloneObject(intArr, true, false);
        assert intArrClone != null;
        assert intArrClone != intArr;
        assert Arrays.equals(intArrClone, intArr);
        // Boolean array.
        boolean[] boolArr = {true, false, true};
        boolean[] boolArrClone = X.cloneObject(boolArr, true, false);
        assert boolArrClone != null;
        assert boolArrClone != boolArr;
        assert Arrays.equals(boolArrClone, boolArr);
        // String array: deep clone copies the elements themselves.
        String[] strArr = {"str1", "str2", "str3"};
        String[] strArrClone = X.cloneObject(strArr, true, false);
        assert strArrClone != null;
        assert strArrClone != strArr;
        assert Arrays.equals(strArrClone, strArr);
        for (int i = 0; i < strArr.length; i++) {
            assert strArr[i] != strArrClone[i];
            assert strArr[i].equals(strArrClone[i]);
        }
        // Cycles: a self-reference must point at the clone, not the original.
        TestCycled testCycled = new TestCycled();
        TestCycled testCycledClone = X.cloneObject(testCycled, true, false);
        assert testCycledClone != null;
        assert testCycledClone != testCycled;
        assert testCycledClone == testCycledClone.cycle;
        // Cycles and hierarchy
        TestCycledChild testCycledChild = new TestCycledChild();
        TestCycledChild testCycledChildClone = X.cloneObject(testCycledChild, true, false);
        assert testCycledChildClone != null;
        assert testCycledChildClone != testCycledChild;
        assert testCycledChildClone == testCycledChildClone.cycle;
        assert testCycledChildClone == testCycledChildClone.anotherCycle;
        // Cloneable honored
        TestCloneable cloneable2 = new TestCloneable("Some string value");
        TestCloneable cloneableClone2 = X.cloneObject(cloneable2, true, true);
        assert cloneableClone2 != null;
        assert cloneableClone2 != cloneable2;
        assert cloneable2.field.equals(cloneableClone2.field);
        // Try clone class (must not throw).
        X.cloneObject(Integer.class, true, true);
    }

    /**
     * Test cloneable class.
     */
    private static class TestCloneable implements Cloneable {
        /** */
        private String field;
        /** */
        @SuppressWarnings({"unused"})
        private String field1;
        /** */
        @SuppressWarnings({"unused"})
        private final Class cls = Integer.class;
        /**
         * @param val Field value.
         */
        private TestCloneable(String val) {
            field = val;
        }
        /** {@inheritDoc} */
        @Override protected Object clone() throws CloneNotSupportedException {
            return super.clone();
        }
    }

    /**
     * Test NON-cloneable class (fix: the old javadoc wrongly called it
     * cloneable; it does not implement {@link Cloneable}).
     */
    private static class TestCloneable1 {
        /** */
        private String field;
        /**
         * @param val Field value.
         */
        private TestCloneable1(String val) {
            field = val;
        }
        /** {@inheritDoc} */
        @Override public int hashCode() {
            return field.hashCode();
        }
    }

    /**
     * Class to test deep cloning with cycles.
     */
    private static class TestCycled {
        /** */
        protected final TestCycled cycle = this;
    }

    /**
     * Class to test hierarchy init.
     */
    private static class TestCycledChild extends TestCycled {
        /** */
        @SuppressWarnings({"unused"})
        private final TestCycled anotherCycle = this;
    }
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.script;
import java.math.BigDecimal;
import java.net.URL;
import java.util.Date;
import java.util.List;
import java.util.Properties;
import javax.script.Bindings;
import javax.script.Compilable;
import javax.script.CompiledScript;
import javax.script.ScriptContext;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
import org.pentaho.di.compatibility.Value;
import org.pentaho.di.core.CheckResult;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettlePluginException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.plugins.KettleURLClassLoader;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaFactory;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
/*
* Created on 2-jun-2003
*
*/
/**
 * Step metadata for the "Script" step: holds the JSR-223 scripts (transform, start and end
 * scripts) attached to the step plus the definitions of the output fields the scripts produce.
 */
public class ScriptMeta extends BaseStepMeta implements StepMetaInterface {
  private static Class<?> PKG = ScriptMeta.class; // for i18n purposes, needed by Translator2!!

  // Tag names used for each script in XML serialization and in the repository.
  private static final String JSSCRIPT_TAG_TYPE = "jsScript_type";
  private static final String JSSCRIPT_TAG_NAME = "jsScript_name";
  private static final String JSSCRIPT_TAG_SCRIPT = "jsScript_script";

  // Extra Java classes exposed to the scripts, declared in the plugin's plugin.xml.
  private ScriptAddClasses[] additionalClasses;

  // The scripts attached to this step (transform, start and/or end scripts).
  private ScriptValuesScript[] jsScripts;

  // Output field definitions; the arrays below are parallel (one entry per field).
  private String[] fieldname;
  private String[] rename;
  private int[] type;
  private int[] length;
  private int[] precision;
  private boolean[] replace; // Replace the specified field.

  public ScriptMeta() {
    super(); // allocate BaseStepMeta
    try {
      parseXmlForAdditionalClasses();
    } catch ( Exception e ) {
      // Best effort: the additional-classes descriptor (plugin.xml) is optional, so
      // failures while loading it are deliberately ignored.
    }
  }

  /**
   * @return Returns the length.
   */
  public int[] getLength() {
    return length;
  }

  /**
   * @param length
   *          The length to set.
   */
  public void setLength( int[] length ) {
    this.length = length;
  }

  /**
   * @return Returns the name.
   */
  public String[] getFieldname() {
    return fieldname;
  }

  /**
   * @param fieldname
   *          The name to set.
   */
  public void setFieldname( String[] fieldname ) {
    this.fieldname = fieldname;
  }

  /**
   * @return Returns the precision.
   */
  public int[] getPrecision() {
    return precision;
  }

  /**
   * @param precision
   *          The precision to set.
   */
  public void setPrecision( int[] precision ) {
    this.precision = precision;
  }

  /**
   * @return Returns the rename.
   */
  public String[] getRename() {
    return rename;
  }

  /**
   * @param rename
   *          The rename to set.
   */
  public void setRename( String[] rename ) {
    this.rename = rename;
  }

  /**
   * @return Returns the type.
   */
  public int[] getType() {
    return type;
  }

  /**
   * @param type
   *          The type to set.
   */
  public void setType( int[] type ) {
    this.type = type;
  }

  /** @return the number of scripts attached to this step */
  public int getNumberOfJSScripts() {
    return jsScripts.length;
  }

  /** @return the names of all attached scripts, in script order */
  public String[] getJSScriptNames() {
    String[] strJSNames = new String[jsScripts.length];
    for ( int i = 0; i < jsScripts.length; i++ ) {
      strJSNames[i] = jsScripts[i].getScriptName();
    }
    return strJSNames;
  }

  public ScriptValuesScript[] getJSScripts() {
    return jsScripts;
  }

  public void setJSScripts( ScriptValuesScript[] jsScripts ) {
    this.jsScripts = jsScripts;
  }

  /** Loads this step's settings from the given XML step node. */
  public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
    readData( stepnode );
  }

  /** (Re)allocates the parallel field arrays for the given number of output fields. */
  public void allocate( int nrfields ) {
    fieldname = new String[nrfields];
    rename = new String[nrfields];
    type = new int[nrfields];
    length = new int[nrfields];
    precision = new int[nrfields];
    replace = new boolean[nrfields];
  }

  /** Clones this metadata, deep-copying the parallel field arrays. */
  public Object clone() {
    ScriptMeta retval = (ScriptMeta) super.clone();
    int nrfields = fieldname.length;
    retval.allocate( nrfields );
    System.arraycopy( fieldname, 0, retval.fieldname, 0, nrfields );
    System.arraycopy( rename, 0, retval.rename, 0, nrfields );
    System.arraycopy( type, 0, retval.type, 0, nrfields );
    System.arraycopy( length, 0, retval.length, 0, nrfields );
    System.arraycopy( precision, 0, retval.precision, 0, nrfields );
    System.arraycopy( replace, 0, retval.replace, 0, nrfields );
    return retval;
  }

  /**
   * Parses the scripts and the output-field definitions from the step's XML node.
   *
   * @throws KettleXMLException if any part of the node cannot be read
   */
  private void readData( Node stepnode ) throws KettleXMLException {
    try {
      Node scripts = XMLHandler.getSubNode( stepnode, "jsScripts" );
      int nrscripts = XMLHandler.countNodes( scripts, "jsScript" );
      jsScripts = new ScriptValuesScript[nrscripts];
      for ( int i = 0; i < nrscripts; i++ ) {
        Node fnode = XMLHandler.getSubNodeByNr( scripts, "jsScript", i );
        jsScripts[i] =
          new ScriptValuesScript(
            Integer.parseInt( XMLHandler.getTagValue( fnode, JSSCRIPT_TAG_TYPE ) ), XMLHandler.getTagValue(
              fnode, JSSCRIPT_TAG_NAME ), XMLHandler.getTagValue( fnode, JSSCRIPT_TAG_SCRIPT ) );
      }
      Node fields = XMLHandler.getSubNode( stepnode, "fields" );
      int nrfields = XMLHandler.countNodes( fields, "field" );
      allocate( nrfields );
      for ( int i = 0; i < nrfields; i++ ) {
        Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
        fieldname[i] = XMLHandler.getTagValue( fnode, "name" );
        rename[i] = XMLHandler.getTagValue( fnode, "rename" );
        type[i] = ValueMetaFactory.getIdForValueMeta( XMLHandler.getTagValue( fnode, "type" ) );
        String slen = XMLHandler.getTagValue( fnode, "length" );
        String sprc = XMLHandler.getTagValue( fnode, "precision" );
        // -1 means "not specified" for both length and precision.
        length[i] = Const.toInt( slen, -1 );
        precision[i] = Const.toInt( sprc, -1 );
        replace[i] = "Y".equalsIgnoreCase( XMLHandler.getTagValue( fnode, "replace" ) );
      }
    } catch ( Exception e ) {
      throw new KettleXMLException( BaseMessages.getString(
        PKG, "ScriptMeta.Exception.UnableToLoadStepInfoFromXML" ), e );
    }
  }

  /** Installs a single empty transform script and no output fields. */
  public void setDefault() {
    jsScripts = new ScriptValuesScript[1];
    jsScripts[0] =
      new ScriptValuesScript( ScriptValuesScript.TRANSFORM_SCRIPT, BaseMessages
        .getString( PKG, "Script.Script1" ), "//"
        + BaseMessages.getString( PKG, "Script.ScriptHere" ) + Const.CR + Const.CR );
    int nrfields = 0;
    allocate( nrfields );
    // The loop below is a no-op while nrfields == 0; it documents the defaults a field would get.
    for ( int i = 0; i < nrfields; i++ ) {
      fieldname[i] = "newvalue";
      rename[i] = "newvalue";
      type[i] = ValueMetaInterface.TYPE_NUMBER;
      length[i] = -1;
      precision[i] = -1;
      replace[i] = false;
    }
  }

  /**
   * Describes how this step changes the row layout: each configured field is either added to the
   * row or, when "replace" is set, substituted for an existing value meta.
   */
  public void getFields( RowMetaInterface row, String originStepname, RowMetaInterface[] info, StepMeta nextStep,
    VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
    for ( int i = 0; i < fieldname.length; i++ ) {
      if ( !Utils.isEmpty( fieldname[i] ) ) {
        String fieldName;
        int replaceIndex;
        int fieldType;
        if ( replace[i] ) {
          // Look up the field to replace...
          //
          if ( row.searchValueMeta( fieldname[i] ) == null && Utils.isEmpty( rename[i] ) ) {
            throw new KettleStepException( BaseMessages.getString(
              PKG, "ScriptMeta.Exception.FieldToReplaceNotFound", fieldname[i] ) );
          }
          // NOTE(review): the replacement slot is located by rename[i], not fieldname[i] —
          // looks intentional (rename designates the target) but worth confirming.
          replaceIndex = row.indexOfValue( rename[i] );
          // Change the data type to match what's specified...
          //
          fieldType = type[i];
          fieldName = rename[i];
        } else {
          replaceIndex = -1;
          fieldType = type[i];
          if ( rename[i] != null && rename[i].length() != 0 ) {
            fieldName = rename[i];
          } else {
            fieldName = fieldname[i];
          }
        }
        try {
          ValueMetaInterface v = ValueMetaFactory.createValueMeta( fieldName, fieldType );
          v.setLength( length[i] );
          v.setPrecision( precision[i] );
          v.setOrigin( originStepname );
          if ( replace[i] && replaceIndex >= 0 ) {
            row.setValueMeta( replaceIndex, v );
          } else {
            row.addValueMeta( v );
          }
        } catch ( KettlePluginException e ) {
          // Ignore errors
        }
      }
    }
  }

  /** Serializes the scripts and field definitions to the step's XML fragment. */
  public String getXML() {
    StringBuilder retval = new StringBuilder( 300 );
    retval.append( "    <jsScripts>" );
    for ( int i = 0; i < jsScripts.length; i++ ) {
      retval.append( "      <jsScript>" );
      retval
        .append( "        " ).append( XMLHandler.addTagValue( JSSCRIPT_TAG_TYPE, jsScripts[i].getScriptType() ) );
      retval
        .append( "        " ).append( XMLHandler.addTagValue( JSSCRIPT_TAG_NAME, jsScripts[i].getScriptName() ) );
      retval.append( "        " ).append( XMLHandler.addTagValue( JSSCRIPT_TAG_SCRIPT, jsScripts[i].getScript() ) );
      retval.append( "      </jsScript>" );
    }
    retval.append( "    </jsScripts>" );
    retval.append( "    <fields>" );
    for ( int i = 0; i < fieldname.length; i++ ) {
      retval.append( "      <field>" );
      retval.append( "        " ).append( XMLHandler.addTagValue( "name", fieldname[i] ) );
      retval.append( "        " ).append( XMLHandler.addTagValue( "rename", rename[i] ) );
      retval.append( "        " ).append( XMLHandler.addTagValue( "type",
        ValueMetaFactory.getValueMetaName( type[i] ) ) );
      retval.append( "        " ).append( XMLHandler.addTagValue( "length", length[i] ) );
      retval.append( "        " ).append( XMLHandler.addTagValue( "precision", precision[i] ) );
      retval.append( "        " ).append( XMLHandler.addTagValue( "replace", replace[i] ) );
      retval.append( "      </field>" );
    }
    retval.append( "    </fields>" );
    return retval.toString();
  }

  /**
   * Loads this step's settings from the repository. A legacy single "script" attribute takes
   * precedence (compatibility mode); otherwise the indexed script attributes are read.
   */
  public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
    try {
      String script = rep.getStepAttributeString( id_step, "script" );
      // When in compatibility mode, we load the script, not the other tabs...
      //
      if ( !Utils.isEmpty( script ) ) {
        jsScripts = new ScriptValuesScript[1];
        jsScripts[0] = new ScriptValuesScript( ScriptValuesScript.TRANSFORM_SCRIPT, "ScriptValue", script );
      } else {
        int nrScripts = rep.countNrStepAttributes( id_step, JSSCRIPT_TAG_NAME );
        jsScripts = new ScriptValuesScript[nrScripts];
        for ( int i = 0; i < nrScripts; i++ ) {
          jsScripts[i] = new ScriptValuesScript(
            (int) rep.getStepAttributeInteger( id_step, i, JSSCRIPT_TAG_TYPE ),
            rep.getStepAttributeString( id_step, i, JSSCRIPT_TAG_NAME ),
            rep.getStepAttributeString( id_step, i, JSSCRIPT_TAG_SCRIPT ) );
        }
      }
      int nrfields = rep.countNrStepAttributes( id_step, "field_name" );
      allocate( nrfields );
      for ( int i = 0; i < nrfields; i++ ) {
        fieldname[i] = rep.getStepAttributeString( id_step, i, "field_name" );
        rename[i] = rep.getStepAttributeString( id_step, i, "field_rename" );
        type[i] = ValueMetaFactory.getIdForValueMeta( rep.getStepAttributeString( id_step, i, "field_type" ) );
        length[i] = (int) rep.getStepAttributeInteger( id_step, i, "field_length" );
        precision[i] = (int) rep.getStepAttributeInteger( id_step, i, "field_precision" );
        replace[i] = rep.getStepAttributeBoolean( id_step, i, "field_replace" );
      }
    } catch ( Exception e ) {
      throw new KettleException( BaseMessages.getString(
        PKG, "ScriptMeta.Exception.UnexpectedErrorInReadingStepInfo" ), e );
    }
  }

  /** Saves the scripts and field definitions to the repository. */
  public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
    try {
      for ( int i = 0; i < jsScripts.length; i++ ) {
        rep.saveStepAttribute( id_transformation, id_step, i, JSSCRIPT_TAG_NAME, jsScripts[i].getScriptName() );
        rep.saveStepAttribute( id_transformation, id_step, i, JSSCRIPT_TAG_SCRIPT, jsScripts[i].getScript() );
        rep.saveStepAttribute( id_transformation, id_step, i, JSSCRIPT_TAG_TYPE, jsScripts[i].getScriptType() );
      }
      for ( int i = 0; i < fieldname.length; i++ ) {
        rep.saveStepAttribute( id_transformation, id_step, i, "field_name", fieldname[i] );
        rep.saveStepAttribute( id_transformation, id_step, i, "field_rename", rename[i] );
        rep.saveStepAttribute( id_transformation, id_step, i, "field_type",
          ValueMetaFactory.getValueMetaName( type[i] ) );
        rep.saveStepAttribute( id_transformation, id_step, i, "field_length", length[i] );
        rep.saveStepAttribute( id_transformation, id_step, i, "field_precision", precision[i] );
        rep.saveStepAttribute( id_transformation, id_step, i, "field_replace", replace[i] );
      }
    } catch ( Exception e ) {
      throw new KettleException( BaseMessages.getString( PKG, "ScriptMeta.Exception.UnableToSaveStepInfo" )
        + id_step, e );
    }
  }

  /**
   * Verifies the step configuration: compiles and test-runs the scripts against simulated input
   * data and reports the outcome (and any input-hop problems) via {@code remarks}.
   */
  public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
    Repository repository, IMetaStore metaStore ) {
    boolean error_found = false;
    String error_message = "";
    CheckResult cr;
    ScriptEngine jscx;
    Bindings jsscope;
    CompiledScript jsscript;
    jscx = createNewScriptEngine( stepMeta.getName() );
    jsscope = jscx.getBindings( ScriptContext.ENGINE_SCOPE );
    // String strActiveScriptName="";
    String strActiveStartScriptName = "";
    String strActiveEndScriptName = "";
    String strActiveScript = "";
    String strActiveStartScript = "";
    String strActiveEndScript = "";
    // Building the Scripts
    if ( jsScripts.length > 0 ) {
      for ( int i = 0; i < jsScripts.length; i++ ) {
        if ( jsScripts[i].isTransformScript() ) {
          // strActiveScriptName =jsScripts[i].getScriptName();
          strActiveScript = jsScripts[i].getScript();
        } else if ( jsScripts[i].isStartScript() ) {
          strActiveStartScriptName = jsScripts[i].getScriptName();
          strActiveStartScript = jsScripts[i].getScript();
        } else if ( jsScripts[i].isEndScript() ) {
          strActiveEndScriptName = jsScripts[i].getScriptName();
          strActiveEndScript = jsScripts[i].getScript();
        }
      }
    }
    if ( prev != null && strActiveScript.length() > 0 ) {
      cr =
        new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(
          PKG, "ScriptMeta.CheckResult.ConnectedStepOK", String.valueOf( prev.size() ) ), stepMeta );
      remarks.add( cr );
      // Adding the existing Scripts to the Context
      for ( int i = 0; i < getNumberOfJSScripts(); i++ ) {
        jsscope.put( jsScripts[i].getScriptName(), jsScripts[i].getScript() );
      }
      // Modification for Additional Script parsing
      try {
        if ( getAddClasses() != null ) {
          for ( int i = 0; i < getAddClasses().length; i++ ) {
            // TODO AKRETION ensure it works
            jsscope.put( getAddClasses()[i].getJSName(), getAddClasses()[i].getAddObject() );
          }
        }
      } catch ( Exception e ) {
        error_message = ( "Couldn't add JavaClasses to Context! Error:" );
        cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
        remarks.add( cr );
      }
      // Adding some default JavaScriptFunctions to the System
      // TODO AKRETION not implemented yet
      // Adding some Constants to the JavaScript
      try {
        jsscope.put( "SKIP_TRANSFORMATION", Integer.valueOf( Script.SKIP_TRANSFORMATION ) );
        jsscope.put( "ABORT_TRANSFORMATION", Integer.valueOf( Script.ABORT_TRANSFORMATION ) );
        jsscope.put( "ERROR_TRANSFORMATION", Integer.valueOf( Script.ERROR_TRANSFORMATION ) );
        jsscope.put( "CONTINUE_TRANSFORMATION", Integer.valueOf( Script.CONTINUE_TRANSFORMATION ) );
      } catch ( Exception ex ) {
        error_message = "Couldn't add Transformation Constants! Error:" + Const.CR + ex.toString();
        cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
        remarks.add( cr );
      }
      try {
        ScriptDummy dummyStep = new ScriptDummy( prev, transMeta.getStepFields( stepMeta ) );
        jsscope.put( "_step_", dummyStep );
        Object[] row = new Object[prev.size()];
        jsscope.put( "rowMeta", prev );
        for ( int i = 0; i < prev.size(); i++ ) {
          ValueMetaInterface valueMeta = prev.getValueMeta( i );
          Object valueData = null;
          // Set date and string values to something to simulate real thing
          //
          if ( valueMeta.isDate() ) {
            valueData = new Date();
          }
          if ( valueMeta.isString() ) {
            valueData = "test value test value test value test value test value "
              + "test value test value test value test value test value";
          }
          if ( valueMeta.isInteger() ) {
            valueData = Long.valueOf( 0L );
          }
          if ( valueMeta.isNumber() ) {
            // Double.valueOf replaces the deprecated new Double(...) boxing constructor.
            valueData = Double.valueOf( 0.0 );
          }
          if ( valueMeta.isBigNumber() ) {
            valueData = BigDecimal.ZERO;
          }
          if ( valueMeta.isBoolean() ) {
            valueData = Boolean.TRUE;
          }
          if ( valueMeta.isBinary() ) {
            valueData = new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, };
          }
          row[i] = valueData;
          jsscope.put( valueMeta.getName(), valueData );
        }
        // Add support for Value class (new Value())
        jsscope.put( "Value", Value.class );
        // Add the old style row object for compatibility reasons...
        //
        jsscope.put( "row", row );
      } catch ( Exception ev ) {
        error_message = "Couldn't add Input fields to Script! Error:" + Const.CR + ev.toString();
        cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
        remarks.add( cr );
      }
      try {
        // Checking for StartScript
        if ( strActiveStartScript != null && strActiveStartScript.length() > 0 ) {
          jscx.eval( strActiveStartScript, jsscope );
          error_message = "Found Start Script. " + strActiveStartScriptName + " Processing OK";
          cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, error_message, stepMeta );
          remarks.add( cr );
        }
      } catch ( Exception e ) {
        error_message = "Couldn't process Start Script! Error:" + Const.CR + e.toString();
        cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
        remarks.add( cr );
      }
      try {
        jsscript = ( (Compilable) jscx ).compile( strActiveScript );
        try {
          jsscript.eval( jsscope );
          cr =
            new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(
              PKG, "ScriptMeta.CheckResult.ScriptCompiledOK2" ), stepMeta );
          remarks.add( cr );
          if ( fieldname.length > 0 ) {
            StringBuilder message =
              new StringBuilder( BaseMessages.getString( PKG, "ScriptMeta.CheckResult.FailedToGetValues", String
                .valueOf( fieldname.length ) )
                + Const.CR + Const.CR );
            if ( error_found ) {
              cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, message.toString(), stepMeta );
            } else {
              cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, message.toString(), stepMeta );
            }
            remarks.add( cr );
          }
        } catch ( ScriptException jse ) {
          // Context.exit(); TODO AKRETION NOT SURE
          error_message =
            BaseMessages.getString( PKG, "ScriptMeta.CheckResult.CouldNotExecuteScript" )
              + Const.CR + jse.toString();
          cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
          remarks.add( cr );
        } catch ( Exception e ) {
          // Context.exit(); TODO AKRETION NOT SURE
          error_message =
            BaseMessages.getString( PKG, "ScriptMeta.CheckResult.CouldNotExecuteScript2" )
              + Const.CR + e.toString();
          cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
          remarks.add( cr );
        }
        // Checking End Script
        try {
          if ( strActiveEndScript != null && strActiveEndScript.length() > 0 ) {
            /* Object endScript = */jscx.eval( strActiveEndScript, jsscope );
            error_message = "Found End Script. " + strActiveEndScriptName + " Processing OK";
            cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, error_message, stepMeta );
            remarks.add( cr );
          }
        } catch ( Exception e ) {
          error_message = "Couldn't process End Script! Error:" + Const.CR + e.toString();
          cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
          remarks.add( cr );
        }
      } catch ( Exception e ) {
        // Context.exit(); TODO AKRETION NOT SURE
        error_message =
          BaseMessages.getString( PKG, "ScriptMeta.CheckResult.CouldNotCompileScript" )
            + Const.CR + e.toString();
        cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
        remarks.add( cr );
      }
    } else {
      // Context.exit(); TODO AKRETION NOT SURE
      error_message = BaseMessages.getString( PKG, "ScriptMeta.CheckResult.CouldNotGetFieldsFromPreviousStep" );
      cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, error_message, stepMeta );
      remarks.add( cr );
    }
    // See if we have input streams leading to this step!
    if ( input.length > 0 ) {
      cr =
        new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(
          PKG, "ScriptMeta.CheckResult.ConnectedStepOK2" ), stepMeta );
      remarks.add( cr );
    } else {
      cr =
        new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(
          PKG, "ScriptMeta.CheckResult.NoInputReceived" ), stepMeta );
      remarks.add( cr );
    }
  }

  /**
   * Extracts the brace-delimited body of the named function from a script.
   *
   * <p>BUGFIX: the previous implementation evaluated {@code charAt(iStartPos++)} in both branches
   * of the brace test, advancing the position twice per iteration — it skipped characters,
   * mis-counted brace depth and could run past the end of the string. It also looped forever on
   * unbalanced braces. This version scans one character at a time and stops at the end of input.
   *
   * @param strFunction the function name to locate
   * @param strScript the script text to search
   * @return the text between the function's outermost braces, or "" when not found
   */
  public String getFunctionFromScript( String strFunction, String strScript ) {
    StringBuilder sRC = new StringBuilder();
    int iStartPos = strScript.indexOf( strFunction );
    if ( iStartPos > 0 ) {
      iStartPos = strScript.indexOf( '{', iStartPos );
      if ( iStartPos < 0 ) {
        return "";
      }
      int iCounter = 1;
      int pos = iStartPos + 1;
      while ( iCounter != 0 && pos < strScript.length() ) {
        char c = strScript.charAt( pos );
        if ( c == '{' ) {
          iCounter++;
        } else if ( c == '}' ) {
          iCounter--;
        }
        if ( iCounter != 0 ) {
          sRC.append( c );
        }
        pos++;
      }
    }
    return sRC.toString();
  }

  /**
   * Test-helper used by {@link #check}: pulls field {@code i} out of the script scope and
   * converts it to the configured Kettle type, appending progress text to {@code message}.
   *
   * @return true when an error was found, false on success
   */
  public boolean getValue( Bindings scope, int i, Value res, StringBuilder message ) {
    boolean error_found = false;
    if ( fieldname[i] != null && fieldname[i].length() > 0 ) {
      res.setName( rename[i] );
      res.setType( type[i] );
      try {
        Object result = scope.get( fieldname[i] );
        if ( result != null ) {
          String classname = result.getClass().getName();
          switch ( type[i] ) {
            case ValueMetaInterface.TYPE_NUMBER:
              if ( classname.equalsIgnoreCase( "org.mozilla.javascript.Undefined" ) ) {
                res.setNull();
              } else if ( classname.equalsIgnoreCase( "org.mozilla.javascript.NativeJavaObject" ) ) {
                // Is it a java Value class ?
                Value v = (Value) result;
                res.setValue( v.getNumber() );
              } else {
                res.setValue( ( (Double) result ).doubleValue() );
              }
              break;
            case ValueMetaInterface.TYPE_INTEGER:
              if ( classname.equalsIgnoreCase( "java.lang.Byte" ) ) {
                res.setValue( ( (java.lang.Byte) result ).longValue() );
              } else if ( classname.equalsIgnoreCase( "java.lang.Short" ) ) {
                res.setValue( ( (Short) result ).longValue() );
              } else if ( classname.equalsIgnoreCase( "java.lang.Integer" ) ) {
                res.setValue( ( (Integer) result ).longValue() );
              } else if ( classname.equalsIgnoreCase( "java.lang.Long" ) ) {
                res.setValue( ( (Long) result ).longValue() );
              } else if ( classname.equalsIgnoreCase( "org.mozilla.javascript.Undefined" ) ) {
                res.setNull();
              } else if ( classname.equalsIgnoreCase( "org.mozilla.javascript.NativeJavaObject" ) ) {
                // Is it a java Value class ?
                Value v = (Value) result;
                res.setValue( v.getInteger() );
              } else {
                res.setValue( Math.round( ( (Double) result ).doubleValue() ) );
              }
              break;
            case ValueMetaInterface.TYPE_STRING:
              if ( classname.equalsIgnoreCase( "org.mozilla.javascript.NativeJavaObject" )
                || classname.equalsIgnoreCase( "org.mozilla.javascript.Undefined" ) ) {
                // Is it a java Value class ?
                try {
                  Value v = (Value) result;
                  res.setValue( v.getString() );
                } catch ( Exception ev ) {
                  // A String perhaps?
                  String s = (String) result;
                  res.setValue( s );
                }
              } else {
                res.setValue( ( (String) result ) );
              }
              break;
            case ValueMetaInterface.TYPE_DATE:
              double dbl = 0;
              if ( classname.equalsIgnoreCase( "org.mozilla.javascript.Undefined" ) ) {
                res.setNull();
              } else {
                if ( classname.equalsIgnoreCase( "org.mozilla.javascript.NativeDate" ) ) {
                  dbl = (Double) result; // TODO AKRETION not sure!
                } else if ( classname.equalsIgnoreCase( "org.mozilla.javascript.NativeJavaObject" ) ) {
                  // Is it a java Date() class ?
                  try {
                    Date dat = (Date) result;
                    dbl = dat.getTime();
                  } catch ( Exception e ) { // Nope, try a Value
                    Value v = (Value) result;
                    Date dat = v.getDate();
                    if ( dat != null ) {
                      dbl = dat.getTime();
                    } else {
                      res.setNull();
                    }
                  }
                } else { // Finally, try a number conversion to time
                  dbl = ( (Double) result ).doubleValue();
                }
                long lng = Math.round( dbl );
                Date dat = new Date( lng );
                res.setValue( dat );
              }
              break;
            case ValueMetaInterface.TYPE_BOOLEAN:
              res.setValue( ( (Boolean) result ).booleanValue() );
              break;
            default:
              res.setNull();
          }
        } else {
          res.setNull();
        }
      } catch ( Exception e ) {
        message.append( BaseMessages.getString( PKG, "ScriptMeta.CheckResult.ErrorRetrievingValue", fieldname[i] )
          + " : " + e.toString() );
        error_found = true;
      }
      res.setLength( length[i], precision[i] );
      message.append( BaseMessages.getString( PKG, "ScriptMeta.CheckResult.RetrievedValue", fieldname[i], res
        .toStringMeta() ) );
    } else {
      message.append( BaseMessages.getString( PKG, "ScriptMeta.CheckResult.ValueIsEmpty", String.valueOf( i ) ) );
      error_found = true;
    }
    return error_found;
  }

  public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
    TransMeta transMeta, Trans trans ) {
    return new Script( stepMeta, stepDataInterface, cnr, transMeta, trans );
  }

  public StepDataInterface getStepData() {
    return new ScriptData();
  }

  /**
   * Loads the additional classes declared in the plugin's plugin.xml so they can be exposed to
   * the scripts under their configured JavaScript names. (This is for Additional Classloading.)
   */
  public void parseXmlForAdditionalClasses() throws KettleException {
    try {
      Properties sysprops = System.getProperties();
      String strActPath = sysprops.getProperty( "user.dir" );
      Document dom = XMLHandler.loadXMLFile( strActPath + "/plugins/steps/ScriptValues_mod/plugin.xml" );
      Node stepnode = dom.getDocumentElement();
      Node libraries = XMLHandler.getSubNode( stepnode, "js_libraries" );
      int nbOfLibs = XMLHandler.countNodes( libraries, "js_lib" );
      additionalClasses = new ScriptAddClasses[nbOfLibs];
      for ( int i = 0; i < nbOfLibs; i++ ) {
        Node fnode = XMLHandler.getSubNodeByNr( libraries, "js_lib", i );
        String strJarName = XMLHandler.getTagAttribute( fnode, "name" );
        String strClassName = XMLHandler.getTagAttribute( fnode, "classname" );
        String strJSName = XMLHandler.getTagAttribute( fnode, "js_name" );
        Class<?> addClass =
          LoadAdditionalClass( strActPath + "/plugins/steps/ScriptValues_mod/" + strJarName, strClassName );
        Object addObject = addClass.newInstance();
        additionalClasses[i] = new ScriptAddClasses( addClass, addObject, strJSName );
      }
    } catch ( Exception e ) {
      throw new KettleException( BaseMessages.getString(
        PKG, "ScriptMeta.Exception.UnableToParseXMLforAdditionalClasses" ), e );
    }
  }

  /** Loads the named class from the given jar via a dedicated URL class loader. */
  private static Class<?> LoadAdditionalClass( String strJar, String strClassName ) throws KettleException {
    try {
      Thread t = Thread.currentThread();
      ClassLoader cl = t.getContextClassLoader();
      URL u = new URL( "jar:file:" + strJar + "!/" );
      // We never know what else the script wants to load with the class loader, so lets not close it just like that.
      @SuppressWarnings( "resource" )
      KettleURLClassLoader kl = new KettleURLClassLoader( new URL[] { u }, cl );
      Class<?> toRun = kl.loadClass( strClassName );
      return toRun;
    } catch ( Exception e ) {
      throw new KettleException(
        BaseMessages.getString( PKG, "ScriptMeta.Exception.UnableToLoadAdditionalClass" ), e );
    }
  }

  public ScriptAddClasses[] getAddClasses() {
    return additionalClasses;
  }

  public boolean supportsErrorHandling() {
    return true;
  }

  /**
   * @return the replace
   */
  public boolean[] getReplace() {
    return replace;
  }

  /**
   * @param replace
   *          the replace to set
   */
  public void setReplace( boolean[] replace ) {
    this.replace = replace;
  }

  /**
   * Instanciates the right scripting language interpreter, falling back to Javascript for backward compat. Because
   * Kettle GUI sucks for extensibility, we use the script name extension to determine the language rather than add a
   * Combo box. Complain to Pentaho please.
   *
   * <p>BUGFIX: the previous index expression {@code strings[strings.length > 0 ? 1 : 0]} threw
   * {@link ArrayIndexOutOfBoundsException} for names without a dot ({@code split} always returns
   * at least one element) and picked the wrong token for names with several dots; we now take the
   * last token as the extension.</p>
   *
   * @param stepName the step/script name whose extension selects the engine
   * @return a matching engine, or the Javascript engine as fallback
   */
  public static ScriptEngine createNewScriptEngine( String stepName ) {
    System.setProperty( "org.jruby.embed.localvariable.behavior", "persistent" ); // required for JRuby, transparent for
    // others
    if ( Thread.currentThread().getContextClassLoader() == null ) {
      Thread.currentThread().setContextClassLoader( ScriptMeta.class.getClassLoader() );
    }
    ScriptEngineManager manager = new ScriptEngineManager();
    String[] strings = stepName.split( "\\." );
    // Use the last dot-separated token as the extension; no dot means the whole name is used.
    String extension = strings[strings.length - 1];
    ScriptEngine scriptEngine = manager.getEngineByName( extension );
    if ( scriptEngine == null ) { // falls back to Javascript
      scriptEngine = manager.getEngineByName( "javascript" );
    }
    return scriptEngine;
  }
}
| |
package com.google.android.apps.common.testing.accessibility.framework;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.android.apps.common.testing.accessibility.framework.proto.AccessibilityEvaluationProtos.IntListProto;
import com.google.android.apps.common.testing.accessibility.framework.proto.AccessibilityEvaluationProtos.MetadataProto;
import com.google.android.apps.common.testing.accessibility.framework.proto.AccessibilityEvaluationProtos.StringListProto;
import com.google.android.apps.common.testing.accessibility.framework.proto.AccessibilityEvaluationProtos.TypedValueProto;
import com.google.android.apps.common.testing.accessibility.framework.proto.AccessibilityEvaluationProtos.TypedValueProto.TypeProto;
import com.google.common.collect.EnumBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.protobuf.ByteString;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.TreeMap;
import javax.annotation.Nullable;
/** An implementation of {@link ResultMetadata} backed by a {@link HashMap} */
public class HashMapResultMetadata implements ResultMetadata {
protected final Map<String, TypedValue> map;
public HashMapResultMetadata() {
map = new HashMap<>();
}
/**
* Creates an instance containing a copy of the mappings from the given instance.
*
* @param metadata The instance from which to copy mappings
*/
protected HashMapResultMetadata(HashMapResultMetadata metadata) {
map = new HashMap<>(metadata.map);
}
@Override
public HashMapResultMetadata clone() {
return new HashMapResultMetadata(this);
}
@Override
public boolean getBoolean(String key) {
TypedValue tv = map.get(key);
if (tv == null) {
throw invalidKeyException(key);
} else if (TypedValue.Type.BOOLEAN != tv.type) {
throw invalidTypeException(key, TypedValue.Type.BOOLEAN, tv.type);
}
return (boolean) tv.value;
}
@Override
public boolean getBoolean(String key, boolean defaultValue) {
TypedValue tv = map.get(key);
if (tv == null) {
return defaultValue;
} else if (TypedValue.Type.BOOLEAN != tv.type) {
throw invalidTypeException(key, TypedValue.Type.BOOLEAN, tv.type);
}
return (boolean) tv.value;
}
@Override
public void putBoolean(String key, boolean value) {
map.put(key, new TypedValue(TypedValue.Type.BOOLEAN, value));
}
@Override
public byte getByte(String key) {
TypedValue tv = map.get(key);
if (tv == null) {
throw invalidKeyException(key);
} else if (TypedValue.Type.BYTE != tv.type) {
throw invalidTypeException(key, TypedValue.Type.BYTE, tv.type);
}
return (byte) tv.value;
}
@Override
public byte getByte(String key, byte defaultValue) {
TypedValue tv = map.get(key);
if (tv == null) {
return defaultValue;
} else if (TypedValue.Type.BYTE != tv.type) {
throw invalidTypeException(key, TypedValue.Type.BYTE, tv.type);
}
return (byte) tv.value;
}
@Override
public void putByte(String key, byte value) {
map.put(key, new TypedValue(TypedValue.Type.BYTE, value));
}
@Override
public short getShort(String key) {
TypedValue tv = map.get(key);
if (tv == null) {
throw invalidKeyException(key);
} else if (TypedValue.Type.SHORT != tv.type) {
throw invalidTypeException(key, TypedValue.Type.SHORT, tv.type);
}
return (short) tv.value;
}
@Override
public short getShort(String key, short defaultValue) {
TypedValue tv = map.get(key);
if (tv == null) {
return defaultValue;
} else if (TypedValue.Type.SHORT != tv.type) {
throw invalidTypeException(key, TypedValue.Type.SHORT, tv.type);
}
return (short) tv.value;
}
@Override
public void putShort(String key, short value) {
map.put(key, new TypedValue(TypedValue.Type.SHORT, value));
}
@Override
public char getChar(String key) {
TypedValue tv = map.get(key);
if (tv == null) {
throw invalidKeyException(key);
} else if (TypedValue.Type.CHAR != tv.type) {
throw invalidTypeException(key, TypedValue.Type.CHAR, tv.type);
}
return (char) tv.value;
}
@Override
public char getChar(String key, char defaultValue) {
TypedValue tv = map.get(key);
if (tv == null) {
return defaultValue;
} else if (TypedValue.Type.CHAR != tv.type) {
throw invalidTypeException(key, TypedValue.Type.CHAR, tv.type);
}
return (char) tv.value;
}
@Override
public void putChar(String key, char value) {
map.put(key, new TypedValue(TypedValue.Type.CHAR, value));
}
@Override
public int getInt(String key) {
TypedValue tv = map.get(key);
if (tv == null) {
throw invalidKeyException(key);
} else if (TypedValue.Type.INT != tv.type) {
throw invalidTypeException(key, TypedValue.Type.INT, tv.type);
}
return (int) tv.value;
}
@Override
public int getInt(String key, int defaultValue) {
TypedValue tv = map.get(key);
if (tv == null) {
return defaultValue;
} else if (TypedValue.Type.INT != tv.type) {
throw invalidTypeException(key, TypedValue.Type.INT, tv.type);
}
return (int) tv.value;
}
@Override
public void putInt(String key, int value) {
map.put(key, new TypedValue(TypedValue.Type.INT, value));
}
@Override
public float getFloat(String key) {
  // Required read: missing key or wrong stored type is an error.
  TypedValue stored = map.get(key);
  if (stored == null) {
    throw invalidKeyException(key);
  }
  if (stored.type != TypedValue.Type.FLOAT) {
    throw invalidTypeException(key, TypedValue.Type.FLOAT, stored.type);
  }
  return (float) stored.value;
}

@Override
public float getFloat(String key, float defaultValue) {
  // Optional read: fall back to defaultValue when the key is absent.
  TypedValue stored = map.get(key);
  if (stored == null) {
    return defaultValue;
  }
  if (stored.type != TypedValue.Type.FLOAT) {
    throw invalidTypeException(key, TypedValue.Type.FLOAT, stored.type);
  }
  return (float) stored.value;
}

@Override
public void putFloat(String key, float value) {
  // Box the primitive and tag it so typed reads can validate it later.
  map.put(key, new TypedValue(TypedValue.Type.FLOAT, value));
}
@Override
public long getLong(String key) {
  // Required read: missing key or wrong stored type is an error.
  TypedValue stored = map.get(key);
  if (stored == null) {
    throw invalidKeyException(key);
  }
  if (stored.type != TypedValue.Type.LONG) {
    throw invalidTypeException(key, TypedValue.Type.LONG, stored.type);
  }
  return (long) stored.value;
}

@Override
public long getLong(String key, long defaultValue) {
  // Optional read: fall back to defaultValue when the key is absent.
  TypedValue stored = map.get(key);
  if (stored == null) {
    return defaultValue;
  }
  if (stored.type != TypedValue.Type.LONG) {
    throw invalidTypeException(key, TypedValue.Type.LONG, stored.type);
  }
  return (long) stored.value;
}

@Override
public void putLong(String key, long value) {
  // Box the primitive and tag it so typed reads can validate it later.
  map.put(key, new TypedValue(TypedValue.Type.LONG, value));
}
@Override
public double getDouble(String key) {
  // Required read: missing key or wrong stored type is an error.
  TypedValue stored = map.get(key);
  if (stored == null) {
    throw invalidKeyException(key);
  }
  if (stored.type != TypedValue.Type.DOUBLE) {
    throw invalidTypeException(key, TypedValue.Type.DOUBLE, stored.type);
  }
  return (double) stored.value;
}

@Override
public double getDouble(String key, double defaultValue) {
  // Optional read: fall back to defaultValue when the key is absent.
  TypedValue stored = map.get(key);
  if (stored == null) {
    return defaultValue;
  }
  if (stored.type != TypedValue.Type.DOUBLE) {
    throw invalidTypeException(key, TypedValue.Type.DOUBLE, stored.type);
  }
  return (double) stored.value;
}

@Override
public void putDouble(String key, double value) {
  // Box the primitive and tag it so typed reads can validate it later.
  map.put(key, new TypedValue(TypedValue.Type.DOUBLE, value));
}
@Override
public String getString(String key) {
  // Required read: missing key or wrong stored type is an error.
  TypedValue stored = map.get(key);
  if (stored == null) {
    throw invalidKeyException(key);
  }
  if (stored.type != TypedValue.Type.STRING) {
    throw invalidTypeException(key, TypedValue.Type.STRING, stored.type);
  }
  return (String) stored.value;
}

@Override
public String getString(String key, String defaultValue) {
  // Optional read: fall back to defaultValue (which may itself be null)
  // when the key is absent.
  TypedValue stored = map.get(key);
  if (stored == null) {
    return defaultValue;
  }
  if (stored.type != TypedValue.Type.STRING) {
    throw invalidTypeException(key, TypedValue.Type.STRING, stored.type);
  }
  return (String) stored.value;
}

@Override
public void putString(String key, String value) {
  // Store the value tagged so typed reads can validate it later.
  map.put(key, new TypedValue(TypedValue.Type.STRING, value));
}
@SuppressWarnings("unchecked") // Safe specification in TypedValue
@Override
public ImmutableList<String> getStringList(String key) {
  // Required read: missing key or wrong stored type is an error.
  TypedValue stored = map.get(key);
  if (stored == null) {
    throw invalidKeyException(key);
  }
  if (stored.type != TypedValue.Type.STRING_LIST) {
    throw invalidTypeException(key, TypedValue.Type.STRING_LIST, stored.type);
  }
  return (ImmutableList<String>) stored.value;
}

@SuppressWarnings("unchecked") // Safe specification in TypedValue
@Override
public ImmutableList<String> getStringList(String key, ImmutableList<String> defaultValue) {
  // Optional read: fall back to defaultValue when the key is absent.
  TypedValue stored = map.get(key);
  if (stored == null) {
    return defaultValue;
  }
  if (stored.type != TypedValue.Type.STRING_LIST) {
    throw invalidTypeException(key, TypedValue.Type.STRING_LIST, stored.type);
  }
  return (ImmutableList<String>) stored.value;
}

@Override
public void putStringList(String key, List<String> value) {
  // Reject empty lists up front, then store an immutable snapshot so the
  // stored list cannot change underneath later readers.
  checkArgument(!value.isEmpty());
  map.put(key, new TypedValue(TypedValue.Type.STRING_LIST, ImmutableList.copyOf(value)));
}
@Override
public ImmutableList<Integer> getIntegerList(String key) {
  // Shared-helper variant: a null default tells getValue to throw on a missing key.
  return getValue(key, TypedValue.Type.INTEGER_LIST, null);
}

@Override
public ImmutableList<Integer> getIntegerList(String key, ImmutableList<Integer> defaultValue) {
  // NOTE(review): passing a null defaultValue here behaves like the no-default
  // overload (getValue throws for missing keys when defaultValue is null),
  // whereas getStringList(key, null) returns null for a missing key. Confirm
  // this asymmetry is intended.
  return getValue(key, TypedValue.Type.INTEGER_LIST, defaultValue);
}

@Override
public void putIntegerList(String key, List<Integer> value) {
  // Reject empty lists, then store an immutable snapshot of the caller's list.
  checkArgument(!value.isEmpty());
  map.put(key, new TypedValue(TypedValue.Type.INTEGER_LIST, ImmutableList.copyOf(value)));
}
@Override
public boolean containsKey(String key) {
  // Membership check only; says nothing about the stored value's type.
  return map.containsKey(key);
}

@Override
public boolean isEmpty() {
  // True when no metadata entries of any type have been stored.
  return map.isEmpty();
}
/** Rebuilds a metadata instance entry-by-entry from its serialized proto form. */
public static HashMapResultMetadata fromProto(MetadataProto proto) {
  HashMapResultMetadata result = new HashMapResultMetadata();
  for (Entry<String, TypedValueProto> protoEntry : proto.getMetadataMapMap().entrySet()) {
    result.map.put(protoEntry.getKey(), new TypedValue(protoEntry.getValue()));
  }
  return result;
}
/** Serializes every entry of the metadata map into a MetadataProto. */
public MetadataProto toProto() {
  MetadataProto.Builder protoBuilder = MetadataProto.newBuilder();
  for (Entry<String, TypedValue> mapEntry : map.entrySet()) {
    protoBuilder.putMetadataMap(mapEntry.getKey(), mapEntry.getValue().toProto());
  }
  return protoBuilder.build();
}
@Override
public boolean equals(@Nullable Object o) {
  // Two metadata instances are equal iff their underlying typed maps are equal.
  if (o == this) {
    return true;
  }
  if (!(o instanceof HashMapResultMetadata)) {
    return false;
  }
  return map.equals(((HashMapResultMetadata) o).map);
}

@Override
public int hashCode() {
  // Consistent with equals: delegate entirely to the map.
  return map.hashCode();
}

// For debugging: a TreeMap copy sorts the keys so output is deterministic.
@Override
public String toString() {
  TreeMap<String, TypedValue> sorted = new TreeMap<String, TypedValue>(map);
  return sorted.toString();
}
@SuppressWarnings("unchecked") // Safe specification in TypedValue
// Generic lookup helper shared by the typed getters. Behavior:
//   - key present with matching type  -> the stored value, cast to T
//   - key present with other type     -> ClassCastException
//   - key missing, defaultValue != null -> defaultValue
//   - key missing, defaultValue == null -> NoSuchElementException
// Note: a null defaultValue is therefore indistinguishable from "no default".
private <T> T getValue(String key, TypedValue.Type type, @Nullable T defaultValue) {
  TypedValue tv = map.get(key);
  if (tv == null) {
    if (defaultValue != null) {
      return defaultValue;
    } else {
      throw invalidKeyException(key);
    }
  } else if (type != tv.type) {
    throw invalidTypeException(key, type, tv.type);
  }
  return (T) tv.value;
}
// Single factory so every missing-key error shares one message format.
private static NoSuchElementException invalidKeyException(String key) {
  return new NoSuchElementException(
      "No HashMapResultMetadata element found for key '" + key + "'.");
}

// Single factory for type-mismatch errors; reports both the requested and
// the actually-stored type tag.
private static ClassCastException invalidTypeException(
    String key, TypedValue.Type requestedType, TypedValue.Type foundType) {
  String message =
      "Invalid type '"
          + requestedType.name()
          + "' requested from HashMapResultMetadata for key '"
          + key
          + "'. Found type '"
          + foundType.name()
          + "' instead.";
  return new ClassCastException(message);
}
/**
 * Used internally to track the value types of entries in metadata's {@code map}. Explicitly
 * storing types in this fashion allows for simpler serialization and deserialization.
 */
protected static class TypedValue {
  /** Mapping between in memory and serializable types. */
  private static final EnumBiMap<Type, TypeProto> TYPE_MAP =
      EnumBiMap.create(
          ImmutableMap.<Type, TypeProto>builder()
              .put(Type.BOOLEAN, TypeProto.BOOLEAN)
              .put(Type.BYTE, TypeProto.BYTE)
              .put(Type.SHORT, TypeProto.SHORT)
              .put(Type.CHAR, TypeProto.CHAR)
              .put(Type.INT, TypeProto.INT)
              .put(Type.FLOAT, TypeProto.FLOAT)
              .put(Type.LONG, TypeProto.LONG)
              .put(Type.DOUBLE, TypeProto.DOUBLE)
              .put(Type.STRING, TypeProto.STRING)
              .put(Type.STRING_LIST, TypeProto.STRING_LIST)
              // Naming differs across the two enums: INTEGER_LIST <-> INT_LIST.
              .put(Type.INTEGER_LIST, TypeProto.INT_LIST)
              .build());

  /**
   * The supported types for entries within metadata maps
   *
   * <p>CONTRACT: Any Type defined here must have a corresponding {@code TypeProto} in {@link
   * #TYPE_MAP}.
   *
   * <p>CONTRACT: All Types must provide:
   *
   * <ul>
   *   <li>An implementation to serialize its typed data to a protocol buffer format within {@link
   *       TypedValue#toProto()}
   *   <li>An implementation to deserialize its typed data from a protocol buffer format within
   *       {@link TypedValue}'s protocol buffer constructor
   * </ul>
   */
  public enum Type {
    BOOLEAN,
    BYTE,
    SHORT,
    CHAR,
    INT,
    FLOAT,
    LONG,
    DOUBLE,
    STRING,
    STRING_LIST,
    INTEGER_LIST;

    /** Deserializes a type tag; throws if the proto tag has no in-memory mapping. */
    public static Type fromProto(TypeProto proto) {
      return checkNotNull(TYPE_MAP.inverse().get(proto));
    }

    /** Serializes this type tag; throws if this Type is missing from TYPE_MAP. */
    public TypeProto toProto() {
      return checkNotNull(TYPE_MAP.get(this));
    }
  }

  // NOTE(review): public and mutable; nothing in this class reassigns them after
  // construction, but consider making them final and private if external writers
  // can be ruled out — TODO confirm no callers mutate these directly.
  public Type type;
  public Object value;

  public TypedValue(Type type, Object value) {
    this.type = type;
    this.value = value;
  }

  /**
   * Deserializing constructor: reads the type tag, then decodes the value from the
   * matching oneof-style field of the proto.
   */
  public TypedValue(TypedValueProto proto) {
    type = Type.fromProto(proto.getType());
    switch (type) {
      case BOOLEAN:
        value = proto.getBooleanValue();
        break;
      case BYTE:
        // BYTE/SHORT/CHAR are carried as raw bytes; ByteBuffer decodes them using
        // its default big-endian order, matching the encoding in toProto().
        value = proto.getByteValue().asReadOnlyByteBuffer().get();
        break;
      case SHORT:
        value = proto.getShortValue().asReadOnlyByteBuffer().getShort();
        break;
      case CHAR:
        value = proto.getCharValue().asReadOnlyByteBuffer().getChar();
        break;
      case INT:
        value = proto.getIntValue();
        break;
      case FLOAT:
        value = proto.getFloatValue();
        break;
      case LONG:
        value = proto.getLongValue();
        break;
      case DOUBLE:
        value = proto.getDoubleValue();
        break;
      case STRING:
        value = proto.getStringValue();
        break;
      case STRING_LIST:
        value = ImmutableList.copyOf(proto.getStringListValue().getValuesList());
        break;
      case INTEGER_LIST:
        value = ImmutableList.copyOf(proto.getIntListValue().getValuesList());
        break;
    }
  }

  /**
   * Serializing counterpart of the proto constructor: writes the type tag plus the
   * one value field corresponding to {@code type}.
   */
  @SuppressWarnings("unchecked") // Safe specification in TypedValue
  public TypedValueProto toProto() {
    TypedValueProto.Builder builder = TypedValueProto.newBuilder();
    builder.setType(type.toProto());
    switch (type) {
      case BOOLEAN:
        builder.setBooleanValue((boolean) value);
        break;
      case BYTE:
        // flip() rewinds the buffer after writing so copyFrom reads from position 0.
        ByteBuffer byteBuffer = ByteBuffer.allocate(1);
        byteBuffer.put((byte) value).flip();
        builder.setByteValue(ByteString.copyFrom(byteBuffer));
        break;
      case SHORT:
        ByteBuffer shortBuffer = ByteBuffer.allocate(2);
        shortBuffer.putShort((short) value).flip();
        builder.setShortValue(ByteString.copyFrom(shortBuffer));
        break;
      case CHAR:
        ByteBuffer charBuffer = ByteBuffer.allocate(2);
        charBuffer.putChar((char) value).flip();
        builder.setCharValue(ByteString.copyFrom(charBuffer));
        break;
      case INT:
        builder.setIntValue((int) value);
        break;
      case FLOAT:
        builder.setFloatValue((float) value);
        break;
      case LONG:
        builder.setLongValue((long) value);
        break;
      case DOUBLE:
        builder.setDoubleValue((double) value);
        break;
      case STRING:
        builder.setStringValue((String) value);
        break;
      case STRING_LIST:
        builder.setStringListValue(
            StringListProto.newBuilder().addAllValues((ImmutableList<String>) value).build());
        break;
      case INTEGER_LIST:
        builder.setIntListValue(
            IntListProto.newBuilder().addAllValues((ImmutableList<Integer>) value).build());
        break;
    }
    return builder.build();
  }

  @Override
  public boolean equals(@Nullable Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof TypedValue)) {
      return false;
    }
    TypedValue that = (TypedValue) o;
    if (type != that.type) {
      return false;
    }
    // Assumes value is non-null; a null stored value would NPE here —
    // TODO confirm callers never store null (e.g. putString(key, null)).
    return value.equals(that.value);
  }

  @Override
  public int hashCode() {
    int result = type.hashCode();
    result = 31 * result + value.hashCode();
    return result;
  }

  // For debugging
  @Override
  public String toString() {
    return String.valueOf(value);
  }
}
}
| |
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.content.browser;
import android.content.ComponentName;
import android.content.Context;
import android.os.Bundle;
import android.os.IBinder;
import androidx.test.filters.MediumTest;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.chromium.base.process_launcher.ChildConnectionAllocator;
import org.chromium.base.process_launcher.ChildProcessConnection;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.chromium.base.test.util.CommandLineFlags;
import org.chromium.base.test.util.CriteriaHelper;
import org.chromium.base.test.util.UrlUtils;
import org.chromium.content_public.browser.ContentFeatureList;
import org.chromium.content_public.browser.LoadUrlParams;
import org.chromium.content_public.browser.NavigationController;
import org.chromium.content_public.browser.test.util.TestCallbackHelperContainer;
import org.chromium.content_shell_apk.ChildProcessLauncherTestUtils;
import org.chromium.content_shell_apk.ContentShellActivity;
import org.chromium.content_shell_apk.ContentShellActivityTestRule;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
/**
 * Integration test that starts the full shell and load pages to test ChildProcessLauncher
 * and related code.
 */
@RunWith(BaseJUnit4ClassRunner.class)
public class ChildProcessLauncherIntegrationTest {
    @Rule
    public final ContentShellActivityTestRule mActivityTestRule =
            new ContentShellActivityTestRule();

    // Factory that records every connection it hands out so tests can assert on them later.
    private static class TestChildProcessConnectionFactory
            implements ChildConnectionAllocator.ConnectionFactory {
        private final List<TestChildProcessConnection> mConnections = new ArrayList<>();
        @Override
        public ChildProcessConnection createConnection(Context context, ComponentName serviceName,
                ComponentName fallbackServiceName, boolean bindToCaller,
                boolean bindAsExternalService, Bundle serviceBundle, String instanceName) {
            // fallbackServiceName and instanceName are not forwarded here; the test
            // connection's constructor passes null for both.
            TestChildProcessConnection connection = new TestChildProcessConnection(
                    context, serviceName, bindToCaller, bindAsExternalService, serviceBundle);
            mConnections.add(connection);
            return connection;
        }
        public List<TestChildProcessConnection> getConnections() {
            return mConnections;
        }
    }

    // Connection that captures (as a RuntimeException recording the stack trace) the first
    // point at which it ends up with neither a moderate nor a strong binding.
    private static class TestChildProcessConnection extends ChildProcessConnection {
        private RuntimeException mRemovedBothModerateAndStrongBinding;
        public TestChildProcessConnection(Context context, ComponentName serviceName,
                boolean bindToCaller, boolean bindAsExternalService,
                Bundle childProcessCommonParameters) {
            super(context, serviceName, null /* fallbackServiceName */, bindToCaller,
                    bindAsExternalService, childProcessCommonParameters, null /* instanceName */);
        }
        @Override
        protected void unbind() {
            super.unbind();
            if (mRemovedBothModerateAndStrongBinding == null) {
                mRemovedBothModerateAndStrongBinding = new RuntimeException("unbind");
            }
        }
        // NOTE(review): "waiveCpuPrority" is a pre-existing typo ("Priority"); renaming the
        // parameter would be safe since parameter names are local, but it is kept as-is here.
        @Override
        public void removeModerateBinding(boolean waiveCpuPrority) {
            super.removeModerateBinding(waiveCpuPrority);
            if (mRemovedBothModerateAndStrongBinding == null && !isStrongBindingBound()) {
                mRemovedBothModerateAndStrongBinding =
                        new RuntimeException("removeModerateBinding");
            }
        }
        @Override
        public void removeStrongBinding() {
            super.removeStrongBinding();
            if (mRemovedBothModerateAndStrongBinding == null && !isModerateBindingBound()) {
                mRemovedBothModerateAndStrongBinding = new RuntimeException("removeStrongBinding");
            }
        }
        // Rethrows the captured stack trace (as the cause) if importance was ever lost.
        public void throwIfDroppedBothModerateAndStrongBinding() {
            if (mRemovedBothModerateAndStrongBinding != null) {
                throw new RuntimeException(mRemovedBothModerateAndStrongBinding);
            }
        }
    }

    @Test
    @MediumTest
    // This test may run with --site-per-process, which also enables a feature to maintain a
    // spare renderer process. The spare process interferes with assertions on the number of
    // process connections in this test, so disable it.
    @CommandLineFlags.Add({"disable-features=SpareRendererForSitePerProcess"})
    public void testCrossDomainNavigationDoNotLoseImportance() throws Throwable {
        final TestChildProcessConnectionFactory factory = new TestChildProcessConnectionFactory();
        final List<TestChildProcessConnection> connections = factory.getConnections();
        ChildProcessLauncherHelperImpl.setSandboxServicesSettingsForTesting(factory,
                10 /* arbitrary number, only really need 2 */,
                null /* use default service name */);
        // TODO(boliu,nasko): Ensure navigation is actually successful
        // before proceeding.
        ContentShellActivity activity = mActivityTestRule.launchContentShellWithUrlSync(
                "content/test/data/android/title1.html");
        NavigationController navigationController =
                mActivityTestRule.getWebContents().getNavigationController();
        TestCallbackHelperContainer testCallbackHelperContainer =
                new TestCallbackHelperContainer(activity.getActiveWebContents());
        mActivityTestRule.loadUrl(navigationController, testCallbackHelperContainer,
                new LoadUrlParams(UrlUtils.getIsolatedTestFileUrl(
                        "content/test/data/android/geolocation.html")));
        // All connection state must be inspected on the launcher thread.
        ChildProcessLauncherTestUtils.runOnLauncherThreadBlocking(new Runnable() {
            @Override
            public void run() {
                Assert.assertEquals(1, connections.size());
                connections.get(0).throwIfDroppedBothModerateAndStrongBinding();
            }
        });
        mActivityTestRule.loadUrl(
                navigationController, testCallbackHelperContainer, new LoadUrlParams("data:,foo"));
        ChildProcessLauncherTestUtils.runOnLauncherThreadBlocking(new Runnable() {
            @Override
            public void run() {
                if (ContentFeatureList.isEnabled(
                            ContentFeatureList.PROCESS_SHARING_WITH_STRICT_SITE_INSTANCES)) {
                    // If this feature is turned on all the URLs will use the same process.
                    // Verify that the process has not lost its importance now that the
                    // data: URL is also in the same process as the file: URLs.
                    Assert.assertEquals(1, connections.size());
                    connections.get(0).throwIfDroppedBothModerateAndStrongBinding();
                } else {
                    Assert.assertEquals(2, connections.size());
                    connections.get(1).throwIfDroppedBothModerateAndStrongBinding();
                }
            }
        });
    }

    @Test
    @MediumTest
    // This test may run with --site-per-process, which also enables a feature to maintain a
    // spare renderer process. The spare process interferes with assertions on the number of
    // process connections in this test, so disable it.
    @CommandLineFlags.Add({"disable-features=SpareRendererForSitePerProcess"})
    public void testIntentionalKillToFreeServiceSlot() throws Throwable {
        final TestChildProcessConnectionFactory factory = new TestChildProcessConnectionFactory();
        final List<TestChildProcessConnection> connections = factory.getConnections();
        // A single sandboxed service slot forces a kill whenever a second process is needed.
        ChildProcessLauncherHelperImpl.setSandboxServicesSettingsForTesting(
                factory, 1, null /* use default service name */);
        // Doing a cross-domain navigation would need to kill the first process in order to create
        // the second process.
        ContentShellActivity activity = mActivityTestRule.launchContentShellWithUrlSync(
                "content/test/data/android/vsync.html");
        NavigationController navigationController =
                mActivityTestRule.getWebContents().getNavigationController();
        TestCallbackHelperContainer testCallbackHelperContainer =
                new TestCallbackHelperContainer(activity.getActiveWebContents());
        mActivityTestRule.loadUrl(navigationController, testCallbackHelperContainer,
                new LoadUrlParams(UrlUtils.getIsolatedTestFileUrl(
                        "content/test/data/android/geolocation.html")));
        mActivityTestRule.loadUrl(
                navigationController, testCallbackHelperContainer, new LoadUrlParams("data:,foo"));
        ChildProcessLauncherTestUtils.runOnLauncherThreadBlocking(new Runnable() {
            @Override
            public void run() {
                if (ContentFeatureList.isEnabled(
                            ContentFeatureList.PROCESS_SHARING_WITH_STRICT_SITE_INSTANCES)) {
                    // If this feature is turned on all the URLs will use the same process
                    // and this test will not observe any kills.
                    Assert.assertEquals(1, connections.size());
                    Assert.assertFalse(connections.get(0).isKilledByUs());
                } else {
                    // The file: URLs and data: URL are expected to be in different processes and
                    // the data: URL is expected to kill the process used for the file: URLs.
                    // Note: The default SiteInstance process model also follows this path because
                    // file: URLs are not allowed in the default SiteInstance process while data:
                    // URLs are.
                    Assert.assertEquals(2, connections.size());
                    Assert.assertTrue(connections.get(0).isKilledByUs());
                }
            }
        });
    }

    // Connection that crashes its service as soon as the service connects, and defers any
    // earlier setupConnection call until after the crash so it is guaranteed to fail.
    private static class CrashOnLaunchChildProcessConnection extends TestChildProcessConnection {
        private boolean mCrashServiceCalled;
        private final CountDownLatch mDisconnectedLatch = new CountDownLatch(1);
        // Arguments to setupConnection, stashed until the crash has happened.
        private Bundle mConnectionBundle;
        private List<IBinder> mClientInterfaces;
        private ConnectionCallback mConnectionCallback;
        public CrashOnLaunchChildProcessConnection(Context context, ComponentName serviceName,
                boolean bindToCaller, boolean bindAsExternalService,
                Bundle childProcessCommonParameters) {
            super(context, serviceName, bindToCaller, bindAsExternalService,
                    childProcessCommonParameters);
        }
        @Override
        protected void onServiceConnectedOnLauncherThread(IBinder service) {
            super.onServiceConnectedOnLauncherThread(service);
            crashServiceForTesting();
            mCrashServiceCalled = true;
            // Replay a setupConnection call that arrived before the crash.
            if (mConnectionBundle != null) {
                super.setupConnection(
                        mConnectionBundle, mClientInterfaces, mConnectionCallback, null);
                mConnectionBundle = null;
                mClientInterfaces = null;
                mConnectionCallback = null;
            }
        }
        @Override
        protected void onServiceDisconnectedOnLauncherThread() {
            super.onServiceDisconnectedOnLauncherThread();
            mDisconnectedLatch.countDown();
        }
        @Override
        public void setupConnection(Bundle connectionBundle, List<IBinder> clientInterfaces,
                ConnectionCallback connectionCallback, ZygoteInfoCallback zygoteInfoCallback) {
            // Make sure setupConnection is called after crashServiceForTesting so that
            // setupConnection is guaranteed to fail.
            if (mCrashServiceCalled) {
                super.setupConnection(connectionBundle, clientInterfaces, connectionCallback, null);
                return;
            }
            mConnectionBundle = connectionBundle;
            mClientInterfaces = clientInterfaces;
            mConnectionCallback = connectionCallback;
        }
        // Blocks the calling thread until the service has disconnected (post-crash).
        public void waitForDisconnect() throws InterruptedException {
            mDisconnectedLatch.await();
        }
    }

    private static class CrashOnLaunchChildProcessConnectionFactory
            extends TestChildProcessConnectionFactory {
        // Only create one CrashOnLaunchChildProcessConnection; subsequent requests fall
        // back to ordinary test connections from the superclass.
        private CrashOnLaunchChildProcessConnection mCrashConnection;
        @Override
        public ChildProcessConnection createConnection(Context context, ComponentName serviceName,
                ComponentName fallbackServiceName, boolean bindToCaller,
                boolean bindAsExternalService, Bundle serviceBundle, String instanceName) {
            if (mCrashConnection == null) {
                mCrashConnection = new CrashOnLaunchChildProcessConnection(
                        context, serviceName, bindToCaller, bindAsExternalService, serviceBundle);
                return mCrashConnection;
            }
            return super.createConnection(context, serviceName, fallbackServiceName, bindToCaller,
                    bindAsExternalService, serviceBundle, instanceName);
        }
        public CrashOnLaunchChildProcessConnection getCrashConnection() {
            return mCrashConnection;
        }
    }

    @Test
    @MediumTest
    public void testCrashOnLaunch() throws Throwable {
        final CrashOnLaunchChildProcessConnectionFactory factory =
                new CrashOnLaunchChildProcessConnectionFactory();
        ChildProcessLauncherHelperImpl.setSandboxServicesSettingsForTesting(
                factory, 1, null /* use default service name */);
        // Load url which should fail.
        String url = UrlUtils.getIsolatedTestFileUrl("content/test/data/android/title1.html");
        ContentShellActivity activity = mActivityTestRule.launchContentShellWithUrl(url);
        // Poll until connection is allocated, then wait until connection is disconnected.
        CriteriaHelper.pollInstrumentationThread(
                ()
                        -> ChildProcessLauncherTestUtils.runOnLauncherAndGetResult(
                                () -> factory.getCrashConnection() != null),
                "The connection wasn't established.");
        CrashOnLaunchChildProcessConnection crashConnection =
                ChildProcessLauncherTestUtils.runOnLauncherAndGetResult(
                        () -> factory.getCrashConnection());
        crashConnection.waitForDisconnect();
        // Load a new URL and make sure everything is ok.
        NavigationController navigationController =
                mActivityTestRule.getWebContents().getNavigationController();
        TestCallbackHelperContainer testCallbackHelperContainer =
                new TestCallbackHelperContainer(activity.getActiveWebContents());
        mActivityTestRule.loadUrl(navigationController, testCallbackHelperContainer,
                new LoadUrlParams(UrlUtils.getIsolatedTestFileUrl(
                        "content/test/data/android/geolocation.html")));
        mActivityTestRule.waitForActiveShellToBeDoneLoading();
        Assert.assertTrue(factory.getConnections().size() > 0);
    }
}
| |
/*******************************************************************************
* Copyright 2009-2017 Amazon Services. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
*
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at: http://aws.amazon.com/apache2.0
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*******************************************************************************
* Shipment Item
* API Version: 2015-05-01
* Library Version: 2017-07-26
* Generated: Tue Jul 25 12:48:56 UTC 2017
*/
package com.amazonservices.mws.finances.model;
import java.util.List;
import java.util.ArrayList;
import com.amazonservices.mws.client.*;
/**
* ShipmentItem complex type.
*
* XML schema:
*
* <pre>
* <complexType name="ShipmentItem">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="SellerSKU" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="OrderItemId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="OrderAdjustmentItemId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="QuantityShipped" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/>
* <element name="ItemChargeList" type="{http://mws.amazonservices.com/Finances/2015-05-01}ChargeComponent" maxOccurs="unbounded"/>
* <element name="ItemChargeAdjustmentList" type="{http://mws.amazonservices.com/Finances/2015-05-01}ChargeComponent" maxOccurs="unbounded"/>
* <element name="ItemFeeList" type="{http://mws.amazonservices.com/Finances/2015-05-01}FeeComponent" maxOccurs="unbounded"/>
* <element name="ItemFeeAdjustmentList" type="{http://mws.amazonservices.com/Finances/2015-05-01}FeeComponent" maxOccurs="unbounded"/>
* <element name="PromotionList" type="{http://mws.amazonservices.com/Finances/2015-05-01}Promotion" maxOccurs="unbounded"/>
* <element name="PromotionAdjustmentList" type="{http://mws.amazonservices.com/Finances/2015-05-01}Promotion" maxOccurs="unbounded"/>
* <element name="CostOfPointsGranted" type="{http://mws.amazonservices.com/Finances/2015-05-01}Currency" minOccurs="0"/>
* <element name="CostOfPointsReturned" type="{http://mws.amazonservices.com/Finances/2015-05-01}Currency" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*/
public class ShipmentItem extends AbstractMwsObject {
// Scalar fields: null means "not set" (see the isSet* accessors).
private String sellerSKU;
private String orderItemId;
private String orderAdjustmentItemId;
private Integer quantityShipped;
// List fields: lazily created by their getters; null or empty both count as
// "not set" in the isSet* accessors.
private List<ChargeComponent> itemChargeList;
private List<ChargeComponent> itemChargeAdjustmentList;
private List<FeeComponent> itemFeeList;
private List<FeeComponent> itemFeeAdjustmentList;
private List<Promotion> promotionList;
private List<Promotion> promotionAdjustmentList;
private Currency costOfPointsGranted;
private Currency costOfPointsReturned;
/**
 * Get the value of SellerSKU.
 *
 * @return The value of SellerSKU, or null if it has not been set.
 */
public String getSellerSKU() {
    return sellerSKU;
}

/**
 * Set the value of SellerSKU.
 *
 * @param sellerSKU
 *            The new value to set.
 */
public void setSellerSKU(String sellerSKU) {
    this.sellerSKU = sellerSKU;
}

/**
 * Check to see if SellerSKU is set.
 *
 * @return true if SellerSKU is set (non-null).
 */
public boolean isSetSellerSKU() {
    return sellerSKU != null;
}

/**
 * Set the value of SellerSKU, return this.
 *
 * @param sellerSKU
 *            The new value to set.
 *
 * @return This instance, for call chaining.
 */
public ShipmentItem withSellerSKU(String sellerSKU) {
    this.sellerSKU = sellerSKU;
    return this;
}
/**
 * Get the value of OrderItemId.
 *
 * @return The value of OrderItemId, or null if it has not been set.
 */
public String getOrderItemId() {
    return orderItemId;
}

/**
 * Set the value of OrderItemId.
 *
 * @param orderItemId
 *            The new value to set.
 */
public void setOrderItemId(String orderItemId) {
    this.orderItemId = orderItemId;
}

/**
 * Check to see if OrderItemId is set.
 *
 * @return true if OrderItemId is set (non-null).
 */
public boolean isSetOrderItemId() {
    return orderItemId != null;
}

/**
 * Set the value of OrderItemId, return this.
 *
 * @param orderItemId
 *            The new value to set.
 *
 * @return This instance, for call chaining.
 */
public ShipmentItem withOrderItemId(String orderItemId) {
    this.orderItemId = orderItemId;
    return this;
}
/**
 * Get the value of OrderAdjustmentItemId.
 *
 * @return The value of OrderAdjustmentItemId, or null if it has not been set.
 */
public String getOrderAdjustmentItemId() {
    return orderAdjustmentItemId;
}

/**
 * Set the value of OrderAdjustmentItemId.
 *
 * @param orderAdjustmentItemId
 *            The new value to set.
 */
public void setOrderAdjustmentItemId(String orderAdjustmentItemId) {
    this.orderAdjustmentItemId = orderAdjustmentItemId;
}

/**
 * Check to see if OrderAdjustmentItemId is set.
 *
 * @return true if OrderAdjustmentItemId is set (non-null).
 */
public boolean isSetOrderAdjustmentItemId() {
    return orderAdjustmentItemId != null;
}

/**
 * Set the value of OrderAdjustmentItemId, return this.
 *
 * @param orderAdjustmentItemId
 *            The new value to set.
 *
 * @return This instance, for call chaining.
 */
public ShipmentItem withOrderAdjustmentItemId(String orderAdjustmentItemId) {
    this.orderAdjustmentItemId = orderAdjustmentItemId;
    return this;
}
/**
 * Get the value of QuantityShipped.
 *
 * @return The value of QuantityShipped, or null if it has not been set.
 */
public Integer getQuantityShipped() {
    return quantityShipped;
}

/**
 * Set the value of QuantityShipped.
 *
 * @param quantityShipped
 *            The new value to set.
 */
public void setQuantityShipped(Integer quantityShipped) {
    this.quantityShipped = quantityShipped;
}

/**
 * Check to see if QuantityShipped is set.
 *
 * @return true if QuantityShipped is set (non-null).
 */
public boolean isSetQuantityShipped() {
    return quantityShipped != null;
}

/**
 * Set the value of QuantityShipped, return this.
 *
 * @param quantityShipped
 *            The new value to set.
 *
 * @return This instance, for call chaining.
 */
public ShipmentItem withQuantityShipped(Integer quantityShipped) {
    this.quantityShipped = quantityShipped;
    return this;
}
/**
 * Get the value of ItemChargeList.
 *
 * <p>Lazily creates an empty list on first access, so this never returns null.
 *
 * @return The value of ItemChargeList.
 */
public List<ChargeComponent> getItemChargeList() {
    if (itemChargeList==null) {
        itemChargeList = new ArrayList<ChargeComponent>();
    }
    return itemChargeList;
}

/**
 * Set the value of ItemChargeList.
 *
 * @param itemChargeList
 *            The new value to set.
 */
public void setItemChargeList(List<ChargeComponent> itemChargeList) {
    this.itemChargeList = itemChargeList;
}

/**
 * Clear ItemChargeList.
 */
public void unsetItemChargeList() {
    this.itemChargeList = null;
}

/**
 * Check to see if ItemChargeList is set.
 *
 * @return true if ItemChargeList is set (non-null and non-empty).
 */
public boolean isSetItemChargeList() {
    return itemChargeList != null && !itemChargeList.isEmpty();
}

/**
 * Add values for ItemChargeList, return this.
 *
 * @param values
 *            New values to add.
 *
 * @return This instance, for call chaining.
 */
public ShipmentItem withItemChargeList(ChargeComponent... values) {
    List<ChargeComponent> list = getItemChargeList();
    for (ChargeComponent value : values) {
        list.add(value);
    }
    return this;
}
/**
 * Get the value of ItemChargeAdjustmentList.
 *
 * <p>Lazily creates an empty list on first access, so this never returns null.
 *
 * @return The value of ItemChargeAdjustmentList.
 */
public List<ChargeComponent> getItemChargeAdjustmentList() {
    if (itemChargeAdjustmentList==null) {
        itemChargeAdjustmentList = new ArrayList<ChargeComponent>();
    }
    return itemChargeAdjustmentList;
}

/**
 * Set the value of ItemChargeAdjustmentList.
 *
 * @param itemChargeAdjustmentList
 *            The new value to set.
 */
public void setItemChargeAdjustmentList(List<ChargeComponent> itemChargeAdjustmentList) {
    this.itemChargeAdjustmentList = itemChargeAdjustmentList;
}

/**
 * Clear ItemChargeAdjustmentList.
 */
public void unsetItemChargeAdjustmentList() {
    this.itemChargeAdjustmentList = null;
}

/**
 * Check to see if ItemChargeAdjustmentList is set.
 *
 * @return true if ItemChargeAdjustmentList is set (non-null and non-empty).
 */
public boolean isSetItemChargeAdjustmentList() {
    return itemChargeAdjustmentList != null && !itemChargeAdjustmentList.isEmpty();
}

/**
 * Add values for ItemChargeAdjustmentList, return this.
 *
 * @param values
 *            New values to add.
 *
 * @return This instance, for call chaining.
 */
public ShipmentItem withItemChargeAdjustmentList(ChargeComponent... values) {
    List<ChargeComponent> list = getItemChargeAdjustmentList();
    for (ChargeComponent value : values) {
        list.add(value);
    }
    return this;
}
/**
* Get the value of ItemFeeList.
*
* @return The value of ItemFeeList.
*/
public List<FeeComponent> getItemFeeList() {
if (itemFeeList==null) {
itemFeeList = new ArrayList<FeeComponent>();
}
return itemFeeList;
}
/**
* Set the value of ItemFeeList.
*
* @param itemFeeList
* The new value to set.
*/
public void setItemFeeList(List<FeeComponent> itemFeeList) {
this.itemFeeList = itemFeeList;
}
/**
* Clear ItemFeeList.
*/
public void unsetItemFeeList() {
this.itemFeeList = null;
}
/**
 * Check to see if ItemFeeList is set.
 *
 * @return true if ItemFeeList is non-null and non-empty.
 */
public boolean isSetItemFeeList() {
    // De Morgan equivalent of: list != null && !list.isEmpty()
    return !(itemFeeList == null || itemFeeList.isEmpty());
}
/**
 * Add values for ItemFeeList, return this.
 *
 * @param values
 *            New values to add.
 *
 * @return This instance.
 */
public ShipmentItem withItemFeeList(FeeComponent... values) {
    final List<FeeComponent> target = getItemFeeList();
    for (int i = 0; i < values.length; i++) {
        target.add(values[i]);
    }
    return this;
}
/**
 * Get the value of ItemFeeAdjustmentList, lazily creating an empty list
 * on first access so the returned value is never null.
 *
 * @return The value of ItemFeeAdjustmentList.
 */
public List<FeeComponent> getItemFeeAdjustmentList() {
    List<FeeComponent> current = itemFeeAdjustmentList;
    if (current == null) {
        current = new ArrayList<FeeComponent>();
        itemFeeAdjustmentList = current;
    }
    return current;
}
/**
 * Set the value of ItemFeeAdjustmentList.
 *
 * @param itemFeeAdjustmentList
 *            The new value to set; null clears the stored list.
 */
public void setItemFeeAdjustmentList(List<FeeComponent> itemFeeAdjustmentList) {
    this.itemFeeAdjustmentList = itemFeeAdjustmentList;
}
/**
 * Clear ItemFeeAdjustmentList by resetting the field to null, so
 * {@code isSetItemFeeAdjustmentList()} returns false afterwards.
 */
public void unsetItemFeeAdjustmentList() {
    this.itemFeeAdjustmentList = null;
}
/**
 * Check to see if ItemFeeAdjustmentList is set.
 *
 * @return true if ItemFeeAdjustmentList is non-null and non-empty.
 */
public boolean isSetItemFeeAdjustmentList() {
    // De Morgan equivalent of: list != null && !list.isEmpty()
    return !(itemFeeAdjustmentList == null || itemFeeAdjustmentList.isEmpty());
}
/**
 * Add values for ItemFeeAdjustmentList, return this.
 *
 * @param values
 *            New values to add.
 *
 * @return This instance.
 */
public ShipmentItem withItemFeeAdjustmentList(FeeComponent... values) {
    final List<FeeComponent> target = getItemFeeAdjustmentList();
    for (int i = 0; i < values.length; i++) {
        target.add(values[i]);
    }
    return this;
}
/**
 * Get the value of PromotionList, lazily creating an empty list on first
 * access so the returned value is never null.
 *
 * @return The value of PromotionList.
 */
public List<Promotion> getPromotionList() {
    List<Promotion> current = promotionList;
    if (current == null) {
        current = new ArrayList<Promotion>();
        promotionList = current;
    }
    return current;
}
/**
 * Set the value of PromotionList.
 *
 * @param promotionList
 *            The new value to set; null clears the stored list.
 */
public void setPromotionList(List<Promotion> promotionList) {
    this.promotionList = promotionList;
}
/**
 * Clear PromotionList by resetting the field to null, so
 * {@code isSetPromotionList()} returns false afterwards.
 */
public void unsetPromotionList() {
    this.promotionList = null;
}
/**
 * Check to see if PromotionList is set.
 *
 * @return true if PromotionList is non-null and non-empty.
 */
public boolean isSetPromotionList() {
    // De Morgan equivalent of: list != null && !list.isEmpty()
    return !(promotionList == null || promotionList.isEmpty());
}
/**
 * Add values for PromotionList, return this.
 *
 * @param values
 *            New values to add.
 *
 * @return This instance.
 */
public ShipmentItem withPromotionList(Promotion... values) {
    final List<Promotion> target = getPromotionList();
    for (int i = 0; i < values.length; i++) {
        target.add(values[i]);
    }
    return this;
}
/**
 * Get the value of PromotionAdjustmentList, lazily creating an empty
 * list on first access so the returned value is never null.
 *
 * @return The value of PromotionAdjustmentList.
 */
public List<Promotion> getPromotionAdjustmentList() {
    List<Promotion> current = promotionAdjustmentList;
    if (current == null) {
        current = new ArrayList<Promotion>();
        promotionAdjustmentList = current;
    }
    return current;
}
/**
 * Set the value of PromotionAdjustmentList.
 *
 * @param promotionAdjustmentList
 *            The new value to set; null clears the stored list.
 */
public void setPromotionAdjustmentList(List<Promotion> promotionAdjustmentList) {
    this.promotionAdjustmentList = promotionAdjustmentList;
}
/**
 * Clear PromotionAdjustmentList by resetting the field to null, so
 * {@code isSetPromotionAdjustmentList()} returns false afterwards.
 */
public void unsetPromotionAdjustmentList() {
    this.promotionAdjustmentList = null;
}
/**
 * Check to see if PromotionAdjustmentList is set.
 *
 * @return true if PromotionAdjustmentList is non-null and non-empty.
 */
public boolean isSetPromotionAdjustmentList() {
    // De Morgan equivalent of: list != null && !list.isEmpty()
    return !(promotionAdjustmentList == null || promotionAdjustmentList.isEmpty());
}
/**
 * Add values for PromotionAdjustmentList, return this.
 *
 * @param values
 *            New values to add.
 *
 * @return This instance.
 */
public ShipmentItem withPromotionAdjustmentList(Promotion... values) {
    final List<Promotion> target = getPromotionAdjustmentList();
    for (int i = 0; i < values.length; i++) {
        target.add(values[i]);
    }
    return this;
}
/**
 * Get the value of CostOfPointsGranted.
 *
 * @return The value of CostOfPointsGranted; null when not set.
 */
public Currency getCostOfPointsGranted() {
    return costOfPointsGranted;
}
/**
 * Set the value of CostOfPointsGranted.
 *
 * @param costOfPointsGranted
 *            The new value to set; null clears the value.
 */
public void setCostOfPointsGranted(Currency costOfPointsGranted) {
    this.costOfPointsGranted = costOfPointsGranted;
}
/**
 * Check to see if CostOfPointsGranted is set.
 *
 * @return true if CostOfPointsGranted is non-null.
 */
public boolean isSetCostOfPointsGranted() {
    return costOfPointsGranted != null;
}
/**
 * Set the value of CostOfPointsGranted, return this.
 *
 * @param costOfPointsGranted
 *            The new value to set.
 *
 * @return This instance.
 */
public ShipmentItem withCostOfPointsGranted(Currency costOfPointsGranted) {
    // Delegate to the plain setter, then chain.
    setCostOfPointsGranted(costOfPointsGranted);
    return this;
}
/**
 * Get the value of CostOfPointsReturned.
 *
 * @return The value of CostOfPointsReturned; null when not set.
 */
public Currency getCostOfPointsReturned() {
    return costOfPointsReturned;
}
/**
 * Set the value of CostOfPointsReturned.
 *
 * @param costOfPointsReturned
 *            The new value to set; null clears the value.
 */
public void setCostOfPointsReturned(Currency costOfPointsReturned) {
    this.costOfPointsReturned = costOfPointsReturned;
}
/**
 * Check to see if CostOfPointsReturned is set.
 *
 * @return true if CostOfPointsReturned is non-null.
 */
public boolean isSetCostOfPointsReturned() {
    return costOfPointsReturned != null;
}
/**
 * Set the value of CostOfPointsReturned, return this.
 *
 * @param costOfPointsReturned
 *            The new value to set.
 *
 * @return This instance.
 */
public ShipmentItem withCostOfPointsReturned(Currency costOfPointsReturned) {
    // Delegate to the plain setter, then chain.
    setCostOfPointsReturned(costOfPointsReturned);
    return this;
}
/**
 * Read members from a MwsReader.
 * <p>
 * Element names and read order mirror the Finances API response schema;
 * do not reorder these calls, since the reader consumes elements
 * sequentially.
 *
 * @param r
 *            The reader to read from.
 */
@Override
public void readFragmentFrom(MwsReader r) {
    sellerSKU = r.read("SellerSKU", String.class);
    orderItemId = r.read("OrderItemId", String.class);
    orderAdjustmentItemId = r.read("OrderAdjustmentItemId", String.class);
    quantityShipped = r.read("QuantityShipped", Integer.class);
    itemChargeList = r.readList("ItemChargeList", "ChargeComponent", ChargeComponent.class);
    itemChargeAdjustmentList = r.readList("ItemChargeAdjustmentList", "ChargeComponent", ChargeComponent.class);
    itemFeeList = r.readList("ItemFeeList", "FeeComponent", FeeComponent.class);
    itemFeeAdjustmentList = r.readList("ItemFeeAdjustmentList", "FeeComponent", FeeComponent.class);
    promotionList = r.readList("PromotionList", "Promotion", Promotion.class);
    promotionAdjustmentList = r.readList("PromotionAdjustmentList", "Promotion", Promotion.class);
    costOfPointsGranted = r.read("CostOfPointsGranted", Currency.class);
    costOfPointsReturned = r.read("CostOfPointsReturned", Currency.class);
}
/**
 * Write members to a MwsWriter.
 * <p>
 * Members are written in the same element order used by
 * {@code readFragmentFrom}, matching the Finances API schema.
 *
 * @param w
 *            The writer to write to.
 */
@Override
public void writeFragmentTo(MwsWriter w) {
    w.write("SellerSKU", sellerSKU);
    w.write("OrderItemId", orderItemId);
    w.write("OrderAdjustmentItemId", orderAdjustmentItemId);
    w.write("QuantityShipped", quantityShipped);
    w.writeList("ItemChargeList", "ChargeComponent", itemChargeList);
    w.writeList("ItemChargeAdjustmentList", "ChargeComponent", itemChargeAdjustmentList);
    w.writeList("ItemFeeList", "FeeComponent", itemFeeList);
    w.writeList("ItemFeeAdjustmentList", "FeeComponent", itemFeeAdjustmentList);
    w.writeList("PromotionList", "Promotion", promotionList);
    w.writeList("PromotionAdjustmentList", "Promotion", promotionAdjustmentList);
    w.write("CostOfPointsGranted", costOfPointsGranted);
    w.write("CostOfPointsReturned", costOfPointsReturned);
}
/**
 * Write tag, xmlns and members to a MwsWriter.
 *
 * @param w
 *            The Writer to write to.
 */
@Override
public void writeTo(MwsWriter w) {
    // The namespace URI pins this object to the Finances API 2015-05-01 schema.
    w.write("http://mws.amazonservices.com/Finances/2015-05-01", "ShipmentItem",this);
}
/** Default constructor; no members are populated. */
public ShipmentItem() {
    super();
}
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.impl.persistence.deploy;
import static org.camunda.bpm.engine.impl.util.EnsureUtil.ensureNotNull;
import java.io.InputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import org.camunda.bpm.engine.exception.cmmn.CaseDefinitionNotFoundException;
import org.camunda.bpm.engine.exception.dmn.DecisionDefinitionNotFoundException;
import org.camunda.bpm.engine.impl.ProcessDefinitionQueryImpl;
import org.camunda.bpm.engine.impl.ProcessEngineLogger;
import org.camunda.bpm.engine.impl.cmd.GetDeploymentResourceCmd;
import org.camunda.bpm.engine.impl.cmmn.entity.repository.CaseDefinitionEntity;
import org.camunda.bpm.engine.impl.cmmn.entity.repository.CaseDefinitionQueryImpl;
import org.camunda.bpm.engine.impl.context.Context;
import org.camunda.bpm.engine.impl.db.EnginePersistenceLogger;
import org.camunda.bpm.engine.impl.dmn.entity.repository.DecisionDefinitionEntity;
import org.camunda.bpm.engine.impl.dmn.entity.repository.DecisionDefinitionQueryImpl;
import org.camunda.bpm.engine.impl.interceptor.CommandContext;
import org.camunda.bpm.engine.impl.persistence.entity.DeploymentEntity;
import org.camunda.bpm.engine.impl.persistence.entity.ProcessDefinitionEntity;
import org.camunda.bpm.engine.repository.CaseDefinition;
import org.camunda.bpm.engine.repository.DecisionDefinition;
import org.camunda.bpm.engine.repository.ProcessDefinition;
import org.camunda.bpm.model.bpmn.Bpmn;
import org.camunda.bpm.model.bpmn.BpmnModelInstance;
import org.camunda.bpm.model.cmmn.Cmmn;
import org.camunda.bpm.model.cmmn.CmmnModelInstance;
import org.camunda.bpm.model.dmn.Dmn;
import org.camunda.bpm.model.dmn.DmnModelInstance;
/**
 * Caches deployed process, case and decision definitions together with their
 * parsed BPMN/CMMN/DMN model instances, so repeated lookups avoid database
 * round trips and repeated XML parsing. All caches are keyed by definition id.
 *
 * @author Tom Baeyens
 * @author Falko Menge
 */
public class DeploymentCache {

  protected static final EnginePersistenceLogger LOG = ProcessEngineLogger.PERSISTENCE_LOGGER;

  // definition entity caches, keyed by definition id
  protected Map<String, ProcessDefinitionEntity> processDefinitionCache = new HashMap<String, ProcessDefinitionEntity>();
  protected Map<String, CaseDefinitionEntity> caseDefinitionCache = new HashMap<String, CaseDefinitionEntity>();
  protected Map<String, DecisionDefinitionEntity> decisionDefinitionCache = new HashMap<String, DecisionDefinitionEntity>();

  // parsed model instance caches, keyed by definition id
  protected Map<String, BpmnModelInstance> bpmnModelInstanceCache = new HashMap<String, BpmnModelInstance>();
  protected Map<String, CmmnModelInstance> cmmnModelInstanceCache = new HashMap<String, CmmnModelInstance>();
  protected Map<String, DmnModelInstance> dmnModelInstanceCache = new HashMap<String, DmnModelInstance>();

  protected List<Deployer> deployers;

  /**
   * Runs all registered deployers against the given deployment (without
   * authorization checks); deployers populate the definition caches as a
   * side effect.
   */
  public void deploy(final DeploymentEntity deployment) {
    Context.getCommandContext().runWithoutAuthorization(new Callable<Void>() {
      public Void call() throws Exception {
        for (Deployer deployer: deployers) {
          deployer.deploy(deployment);
        }
        return null;
      }
    });
  }

  // PROCESS DEFINITION ////////////////////////////////////////////////////////////////////////////////

  /**
   * Looks up a process definition by id: first in the command context's entity
   * cache, then the database, and resolves it into this cache.
   *
   * @throws org.camunda.bpm.engine.ProcessEngineException if no definition is found
   */
  public ProcessDefinitionEntity findDeployedProcessDefinitionById(String processDefinitionId) {
    ensureNotNull("Invalid process definition id", "processDefinitionId", processDefinitionId);
    CommandContext commandContext = Context.getCommandContext();
    // try the session (first-level) cache before hitting the database
    ProcessDefinitionEntity processDefinition = commandContext.getDbEntityManager().getCachedEntity(ProcessDefinitionEntity.class, processDefinitionId);
    if (processDefinition == null) {
      processDefinition = commandContext
        .getProcessDefinitionManager()
        .findLatestProcessDefinitionById(processDefinitionId);
    }
    ensureNotNull("no deployed process definition found with id '" + processDefinitionId + "'", "processDefinition", processDefinition);
    processDefinition = resolveProcessDefinition(processDefinition);
    return processDefinition;
  }

  /** Looks up the latest version of a process definition by key and resolves it into this cache. */
  public ProcessDefinitionEntity findDeployedLatestProcessDefinitionByKey(String processDefinitionKey) {
    ProcessDefinitionEntity processDefinition = Context
      .getCommandContext()
      .getProcessDefinitionManager()
      .findLatestProcessDefinitionByKey(processDefinitionKey);
    ensureNotNull("no processes deployed with key '" + processDefinitionKey + "'", "processDefinition", processDefinition);
    processDefinition = resolveProcessDefinition(processDefinition);
    return processDefinition;
  }

  /** Looks up a specific version of a process definition by key and resolves it into this cache. */
  public ProcessDefinitionEntity findDeployedProcessDefinitionByKeyAndVersion(final String processDefinitionKey, final Integer processDefinitionVersion) {
    final CommandContext commandContext = Context.getCommandContext();
    // the query is run without authorization checks so the cache can be populated
    // regardless of the current user's permissions
    ProcessDefinitionEntity processDefinition = commandContext.runWithoutAuthorization(new Callable<ProcessDefinitionEntity>() {
      public ProcessDefinitionEntity call() throws Exception {
        return (ProcessDefinitionEntity) commandContext
          .getProcessDefinitionManager()
          .findProcessDefinitionByKeyAndVersion(processDefinitionKey, processDefinitionVersion);
      }
    });
    ensureNotNull("no processes deployed with key = '" + processDefinitionKey + "' and version = '" + processDefinitionVersion + "'", "processDefinition", processDefinition);
    processDefinition = resolveProcessDefinition(processDefinition);
    return processDefinition;
  }

  /** Looks up a process definition by deployment id and key and resolves it into this cache. */
  public ProcessDefinitionEntity findDeployedProcessDefinitionByDeploymentAndKey(String deploymentId, String processDefinitionKey) {
    ProcessDefinitionEntity processDefinition = Context
      .getCommandContext()
      .getProcessDefinitionManager()
      .findProcessDefinitionByDeploymentAndKey(deploymentId, processDefinitionKey);
    ensureNotNull("no processes deployed with key = '" + processDefinitionKey + "' in deployment = '" + deploymentId + "'", "processDefinition", processDefinition);
    processDefinition = resolveProcessDefinition(processDefinition);
    return processDefinition;
  }

  /**
   * Returns the cached entity for the given definition, re-deploying the owning
   * deployment on a cache miss. On a hit, the cached entity is refreshed from
   * the passed entity's modifiable fields.
   */
  public ProcessDefinitionEntity resolveProcessDefinition(ProcessDefinitionEntity processDefinition) {
    String processDefinitionId = processDefinition.getId();
    String deploymentId = processDefinition.getDeploymentId();
    ProcessDefinitionEntity cachedProcessDefinition = processDefinitionCache.get(processDefinitionId);
    if (cachedProcessDefinition==null) {
      DeploymentEntity deployment = Context
        .getCommandContext()
        .getDeploymentManager()
        .findDeploymentById(deploymentId);
      // mark as already persisted so the deployers only parse, not re-insert
      deployment.setNew(false);
      deploy(deployment);
      cachedProcessDefinition = processDefinitionCache.get(processDefinitionId);
      ensureNotNull("deployment '" + deploymentId + "' didn't put process definition '" + processDefinitionId + "' in the cache", "cachedProcessDefinition", cachedProcessDefinition);
    } else {
      // update cached process definition
      cachedProcessDefinition.updateModifiedFieldsFromEntity(processDefinition);
    }
    return cachedProcessDefinition;
  }

  /** Returns the parsed BPMN model for the given definition entity, loading and caching it on a miss. */
  public BpmnModelInstance findBpmnModelInstanceForProcessDefinition(ProcessDefinitionEntity processDefinitionEntity) {
    BpmnModelInstance bpmnModelInstance = bpmnModelInstanceCache.get(processDefinitionEntity.getId());
    if(bpmnModelInstance == null) {
      bpmnModelInstance = loadAndCacheBpmnModelInstance(processDefinitionEntity);
    }
    return bpmnModelInstance;
  }

  /** Returns the parsed BPMN model for the given definition id, loading and caching it on a miss. */
  public BpmnModelInstance findBpmnModelInstanceForProcessDefinition(String processDefinitionId) {
    BpmnModelInstance bpmnModelInstance = bpmnModelInstanceCache.get(processDefinitionId);
    if(bpmnModelInstance == null) {
      ProcessDefinitionEntity processDefinition = findDeployedProcessDefinitionById(processDefinitionId);
      bpmnModelInstance = loadAndCacheBpmnModelInstance(processDefinition);
    }
    return bpmnModelInstance;
  }

  /** Reads the definition's deployment resource, parses it as BPMN and caches the model instance. */
  protected BpmnModelInstance loadAndCacheBpmnModelInstance(final ProcessDefinitionEntity processDefinitionEntity) {
    final CommandContext commandContext = Context.getCommandContext();
    InputStream bpmnResourceInputStream = commandContext.runWithoutAuthorization(new Callable<InputStream>() {
      public InputStream call() throws Exception {
        return new GetDeploymentResourceCmd(processDefinitionEntity.getDeploymentId(), processDefinitionEntity.getResourceName()).execute(commandContext);
      }
    });
    try {
      BpmnModelInstance bpmnModelInstance = Bpmn.readModelFromStream(bpmnResourceInputStream);
      bpmnModelInstanceCache.put(processDefinitionEntity.getId(), bpmnModelInstance);
      return bpmnModelInstance;
    }catch(Exception e) {
      throw LOG.loadModelException("BPMN", "process", processDefinitionEntity.getId(), e);
    }
  }

  public void addProcessDefinition(ProcessDefinitionEntity processDefinition) {
    processDefinitionCache.put(processDefinition.getId(), processDefinition);
  }

  /** Evicts both the definition entity and its parsed model for the given id. */
  public void removeProcessDefinition(String processDefinitionId) {
    processDefinitionCache.remove(processDefinitionId);
    bpmnModelInstanceCache.remove(processDefinitionId);
  }

  public void discardProcessDefinitionCache() {
    processDefinitionCache.clear();
    bpmnModelInstanceCache.clear();
  }

  // CASE DEFINITION ////////////////////////////////////////////////////////////////////////////////

  /**
   * Looks up a case definition by id: first in the command context's entity
   * cache, then the database, and resolves it into this cache.
   *
   * @throws CaseDefinitionNotFoundException if no definition is found
   */
  public CaseDefinitionEntity findDeployedCaseDefinitionById(String caseDefinitionId) {
    ensureNotNull("Invalid case definition id", "caseDefinitionId", caseDefinitionId);
    CommandContext commandContext = Context.getCommandContext();
    // try to load case definition from cache
    CaseDefinitionEntity caseDefinition = commandContext
      .getDbEntityManager()
      .getCachedEntity(CaseDefinitionEntity.class, caseDefinitionId);
    if (caseDefinition == null) {
      // if not found, then load the case definition
      // from db
      caseDefinition = commandContext
        .getCaseDefinitionManager()
        .findCaseDefinitionById(caseDefinitionId);
    }
    ensureNotNull(CaseDefinitionNotFoundException.class, "no deployed case definition found with id '" + caseDefinitionId + "'", "caseDefinition", caseDefinition);
    caseDefinition = resolveCaseDefinition(caseDefinition);
    return caseDefinition;
  }

  /** Looks up the latest version of a case definition by key and resolves it into this cache. */
  public CaseDefinitionEntity findDeployedLatestCaseDefinitionByKey(String caseDefinitionKey) {
    ensureNotNull("Invalid case definition key", "caseDefinitionKey", caseDefinitionKey);
    // load case definition by key from db
    CaseDefinitionEntity caseDefinition = Context
      .getCommandContext()
      .getCaseDefinitionManager()
      .findLatestCaseDefinitionByKey(caseDefinitionKey);
    ensureNotNull(CaseDefinitionNotFoundException.class, "no case definition deployed with key '" + caseDefinitionKey + "'", "caseDefinition", caseDefinition);
    caseDefinition = resolveCaseDefinition(caseDefinition);
    return caseDefinition;
  }

  /** Looks up a specific version of a case definition by key and resolves it into this cache. */
  public CaseDefinitionEntity findDeployedCaseDefinitionByKeyAndVersion(String caseDefinitionKey, Integer caseDefinitionVersion) {
    CaseDefinitionEntity caseDefinition = Context
      .getCommandContext()
      .getCaseDefinitionManager()
      .findCaseDefinitionByKeyAndVersion(caseDefinitionKey, caseDefinitionVersion);
    ensureNotNull(CaseDefinitionNotFoundException.class, "no case definition deployed with key = '" + caseDefinitionKey + "' and version = '" + caseDefinitionVersion + "'", "caseDefinition", caseDefinition);
    caseDefinition = resolveCaseDefinition(caseDefinition);
    return caseDefinition;
  }

  /** Looks up a case definition by deployment id and key and resolves it into this cache. */
  public CaseDefinitionEntity findDeployedCaseDefinitionByDeploymentAndKey(String deploymentId, String caseDefinitionKey) {
    CaseDefinitionEntity caseDefinition = Context
      .getCommandContext()
      .getCaseDefinitionManager()
      .findCaseDefinitionByDeploymentAndKey(deploymentId, caseDefinitionKey);
    ensureNotNull(CaseDefinitionNotFoundException.class, "no case definition deployed with key = '" + caseDefinitionKey + "' in deployment = '" + deploymentId + "'", "caseDefinition", caseDefinition);
    caseDefinition = resolveCaseDefinition(caseDefinition);
    return caseDefinition;
  }

  /** Returns the cached case definition for the id, falling back to a full lookup on a miss. */
  public CaseDefinitionEntity getCaseDefinitionById(String caseDefinitionId) {
    ensureNotNull("caseDefinitionId", caseDefinitionId);
    CaseDefinitionEntity caseDefinition = caseDefinitionCache.get(caseDefinitionId);
    if (caseDefinition == null) {
      caseDefinition = findDeployedCaseDefinitionById(caseDefinitionId);
    }
    return caseDefinition;
  }

  /**
   * Returns the cached entity for the given case definition, re-deploying the
   * owning deployment on a cache miss.
   * NOTE(review): unlike resolveProcessDefinition, a cache hit does not refresh
   * the cached entity from the passed entity — confirm this asymmetry is intended.
   */
  public CaseDefinitionEntity resolveCaseDefinition(CaseDefinitionEntity caseDefinition) {
    String caseDefinitionId = caseDefinition.getId();
    String deploymentId = caseDefinition.getDeploymentId();
    CaseDefinitionEntity cachedCaseDefinition = caseDefinitionCache.get(caseDefinitionId);
    if (cachedCaseDefinition==null) {
      DeploymentEntity deployment = Context
        .getCommandContext()
        .getDeploymentManager()
        .findDeploymentById(deploymentId);
      deployment.setNew(false);
      deploy(deployment);
      cachedCaseDefinition = caseDefinitionCache.get(caseDefinitionId);
      ensureNotNull("deployment '" + deploymentId + "' didn't put case definition '" + caseDefinitionId + "' in the cache", "cachedCaseDefinition", cachedCaseDefinition);
    }
    return cachedCaseDefinition;
  }

  /** Returns the parsed CMMN model for the given case definition id, loading and caching it on a miss. */
  public CmmnModelInstance findCmmnModelInstanceForCaseDefinition(String caseDefinitionId) {
    CmmnModelInstance cmmnModelInstance = cmmnModelInstanceCache.get(caseDefinitionId);
    if(cmmnModelInstance == null) {
      CaseDefinitionEntity caseDefinition = findDeployedCaseDefinitionById(caseDefinitionId);
      final String deploymentId = caseDefinition.getDeploymentId();
      final String resourceName = caseDefinition.getResourceName();
      final CommandContext commandContext = Context.getCommandContext();
      InputStream cmmnResourceInputStream = commandContext.runWithoutAuthorization(new Callable<InputStream>() {
        public InputStream call() throws Exception {
          return new GetDeploymentResourceCmd(deploymentId, resourceName).execute(commandContext);
        }
      });
      try {
        cmmnModelInstance = Cmmn.readModelFromStream(cmmnResourceInputStream);
      }catch(Exception e) {
        throw LOG.loadModelException("CMMN", "case", caseDefinitionId, e);
      }
      // put model instance into cache.
      cmmnModelInstanceCache.put(caseDefinitionId, cmmnModelInstance);
    }
    return cmmnModelInstance;
  }

  public void addCaseDefinition(CaseDefinitionEntity caseDefinition) {
    caseDefinitionCache.put(caseDefinition.getId(), caseDefinition);
  }

  /** Evicts both the case definition entity and its parsed model for the given id. */
  public void removeCaseDefinition(String caseDefinitionId) {
    caseDefinitionCache.remove(caseDefinitionId);
    cmmnModelInstanceCache.remove(caseDefinitionId);
  }

  public void discardCaseDefinitionCache() {
    caseDefinitionCache.clear();
    cmmnModelInstanceCache.clear();
  }

  // DECISION DEFINITION ////////////////////////////////////////////////////////////////////////////

  /**
   * Looks up a decision definition by id: first in the command context's entity
   * cache, then the database, and resolves it into this cache.
   *
   * @throws DecisionDefinitionNotFoundException if no definition is found
   */
  public DecisionDefinitionEntity findDeployedDecisionDefinitionById(String decisionDefinitionId) {
    ensureNotNull("Invalid decision definition id", "decisionDefinitionId", decisionDefinitionId);
    CommandContext commandContext = Context.getCommandContext();
    // try to load decision definition from cache
    DecisionDefinitionEntity decisionDefinition = commandContext
      .getDbEntityManager()
      .getCachedEntity(DecisionDefinitionEntity.class, decisionDefinitionId);
    if (decisionDefinition == null) {
      // if not found, then load the decision definition
      // from db
      decisionDefinition = commandContext
        .getDecisionDefinitionManager()
        .findDecisionDefinitionById(decisionDefinitionId);
    }
    ensureNotNull(DecisionDefinitionNotFoundException.class, "no deployed decision definition found with id '" + decisionDefinitionId + "'", "decisionDefinition", decisionDefinition);
    decisionDefinition = resolveDecisionDefinition(decisionDefinition);
    return decisionDefinition;
  }

  /** Looks up the latest version of a decision definition by key and resolves it into this cache. */
  public DecisionDefinition findDeployedLatestDecisionDefinitionByKey(String decisionDefinitionKey) {
    // fixed: the reported variable name was "caseDefinitionKey" (copy-paste from the
    // case-definition lookup); it must name the actual parameter
    ensureNotNull("Invalid decision definition key", "decisionDefinitionKey", decisionDefinitionKey);
    DecisionDefinitionEntity decisionDefinition = Context
      .getCommandContext()
      .getDecisionDefinitionManager()
      .findLatestDecisionDefinitionByKey(decisionDefinitionKey);
    ensureNotNull(DecisionDefinitionNotFoundException.class, "no decision definition deployed with key '" + decisionDefinitionKey + "'", "decisionDefinition", decisionDefinition);
    decisionDefinition = resolveDecisionDefinition(decisionDefinition);
    return decisionDefinition;
  }

  /** Looks up a decision definition by deployment id and key and resolves it into this cache. */
  public DecisionDefinition findDeployedDecisionDefinitionByDeploymentAndKey(String deploymentId, String decisionDefinitionKey) {
    DecisionDefinitionEntity decisionDefinition = Context
      .getCommandContext()
      .getDecisionDefinitionManager()
      .findDecisionDefinitionByDeploymentAndKey(deploymentId, decisionDefinitionKey);
    ensureNotNull(DecisionDefinitionNotFoundException.class, "no decision definition deployed with key = '" + decisionDefinitionKey + "' in deployment = '" + deploymentId + "'", "decisionDefinition", decisionDefinition);
    decisionDefinition = resolveDecisionDefinition(decisionDefinition);
    return decisionDefinition;
  }

  /** Looks up a specific version of a decision definition by key and resolves it into this cache. */
  public DecisionDefinition findDeployedDecisionDefinitionByKeyAndVersion(String decisionDefinitionKey, Integer decisionDefinitionVersion) {
    DecisionDefinitionEntity decisionDefinition = Context
      .getCommandContext()
      .getDecisionDefinitionManager()
      .findDecisionDefinitionByKeyAndVersion(decisionDefinitionKey, decisionDefinitionVersion);
    ensureNotNull(DecisionDefinitionNotFoundException.class, "no decision definition deployed with key = '" + decisionDefinitionKey + "' and version = '" + decisionDefinitionVersion + "'", "decisionDefinition", decisionDefinition);
    decisionDefinition = resolveDecisionDefinition(decisionDefinition);
    return decisionDefinition;
  }

  /**
   * Returns the cached entity for the given decision definition, re-deploying
   * the owning deployment on a cache miss.
   */
  public DecisionDefinitionEntity resolveDecisionDefinition(DecisionDefinitionEntity decisionDefinition) {
    String decisionDefinitionId = decisionDefinition.getId();
    String deploymentId = decisionDefinition.getDeploymentId();
    DecisionDefinitionEntity cachedDecisionDefinition = decisionDefinitionCache.get(decisionDefinitionId);
    if (cachedDecisionDefinition==null) {
      DeploymentEntity deployment = Context
        .getCommandContext()
        .getDeploymentManager()
        .findDeploymentById(deploymentId);
      deployment.setNew(false);
      deploy(deployment);
      cachedDecisionDefinition = decisionDefinitionCache.get(decisionDefinitionId);
      ensureNotNull("deployment '" + deploymentId + "' didn't put decision definition '" + decisionDefinitionId + "' in the cache", "cachedDecisionDefinition", cachedDecisionDefinition);
    }
    return cachedDecisionDefinition;
  }

  /** Returns the parsed DMN model for the given decision definition id, loading and caching it on a miss. */
  public DmnModelInstance findDmnModelInstanceForDecisionDefinition(String decisionDefinitionId) {
    DmnModelInstance dmnModelInstance = dmnModelInstanceCache.get(decisionDefinitionId);
    if(dmnModelInstance == null) {
      DecisionDefinitionEntity decisionDefinition = findDeployedDecisionDefinitionById(decisionDefinitionId);
      final String deploymentId = decisionDefinition.getDeploymentId();
      final String resourceName = decisionDefinition.getResourceName();
      final CommandContext commandContext = Context.getCommandContext();
      InputStream dmnResourceInputStream = commandContext.runWithoutAuthorization(new Callable<InputStream>() {
        public InputStream call() throws Exception {
          return new GetDeploymentResourceCmd(deploymentId, resourceName).execute(commandContext);
        }
      });
      try {
        dmnModelInstance = Dmn.readModelFromStream(dmnResourceInputStream);
      }catch(Exception e) {
        throw LOG.loadModelException("DMN", "decision", decisionDefinitionId, e);
      }
      // put model instance into cache.
      dmnModelInstanceCache.put(decisionDefinitionId, dmnModelInstance);
    }
    return dmnModelInstance;
  }

  public void addDecisionDefinition(DecisionDefinitionEntity decisionDefinition) {
    decisionDefinitionCache.put(decisionDefinition.getId(), decisionDefinition);
  }

  /** Evicts both the decision definition entity and its parsed model for the given id. */
  public void removeDecisionDefinition(String decisionDefinitionId) {
    decisionDefinitionCache.remove(decisionDefinitionId);
    dmnModelInstanceCache.remove(decisionDefinitionId);
  }

  public void discardDecisionDefinitionCache() {
    decisionDefinitionCache.clear();
    dmnModelInstanceCache.clear();
  }

  // getters and setters //////////////////////////////////////////////////////

  public Map<String, BpmnModelInstance> getBpmnModelInstanceCache() {
    return bpmnModelInstanceCache;
  }

  public Map<String, CmmnModelInstance> getCmmnModelInstanceCache() {
    return cmmnModelInstanceCache;
  }

  public Map<String, DmnModelInstance> getDmnModelInstanceCache() {
    return dmnModelInstanceCache;
  }

  public Map<String, ProcessDefinitionEntity> getProcessDefinitionCache() {
    return processDefinitionCache;
  }

  public void setProcessDefinitionCache(Map<String, ProcessDefinitionEntity> processDefinitionCache) {
    this.processDefinitionCache = processDefinitionCache;
  }

  public Map<String, CaseDefinitionEntity> getCaseDefinitionCache() {
    return caseDefinitionCache;
  }

  public void setCaseDefinitionCache(Map<String, CaseDefinitionEntity> caseDefinitionCache) {
    this.caseDefinitionCache = caseDefinitionCache;
  }

  public List<Deployer> getDeployers() {
    return deployers;
  }

  public void setDeployers(List<Deployer> deployers) {
    this.deployers = deployers;
  }

  /** Evicts every cached definition and model instance belonging to the deployment. */
  public void removeDeployment(String deploymentId) {
    removeAllProcessDefinitionsByDeploymentId(deploymentId);
    removeAllCaseDefinitionsByDeploymentId(deploymentId);
    removeAllDecisionDefinitionsByDeploymentId(deploymentId);
  }

  protected void removeAllProcessDefinitionsByDeploymentId(final String deploymentId) {
    // remove all process definitions for a specific deployment
    final CommandContext commandContext = Context.getCommandContext();
    List<ProcessDefinition> allDefinitionsForDeployment = commandContext.runWithoutAuthorization(new Callable<List<ProcessDefinition>>() {
      public List<ProcessDefinition> call() throws Exception {
        return new ProcessDefinitionQueryImpl()
          .deploymentId(deploymentId)
          .list();
      }
    });
    for (ProcessDefinition processDefinition : allDefinitionsForDeployment) {
      try {
        removeProcessDefinition(processDefinition.getId());
      } catch(Exception e) {
        // best-effort eviction: log and continue with the remaining definitions
        LOG.removeEntryFromDeploymentCacheFailure("process", processDefinition.getId(), e);
      }
    }
  }

  protected void removeAllCaseDefinitionsByDeploymentId(String deploymentId) {
    // remove all case definitions for a specific deployment
    // NOTE(review): unlike the process variant, this query is not wrapped in
    // runWithoutAuthorization — confirm whether that is intentional
    List<CaseDefinition> allDefinitionsForDeployment = new CaseDefinitionQueryImpl()
      .deploymentId(deploymentId)
      .list();
    for (CaseDefinition caseDefinition : allDefinitionsForDeployment) {
      try {
        removeCaseDefinition(caseDefinition.getId());
      } catch(Exception e) {
        LOG.removeEntryFromDeploymentCacheFailure("case", caseDefinition.getId(), e);
      }
    }
  }

  protected void removeAllDecisionDefinitionsByDeploymentId(String deploymentId) {
    // remove all decision definitions for a specific deployment
    // (comment previously said "case definitions" — copy-paste error)
    List<DecisionDefinition> allDefinitionsForDeployment = new DecisionDefinitionQueryImpl()
      .deploymentId(deploymentId)
      .list();
    for (DecisionDefinition decisionDefinition : allDefinitionsForDeployment) {
      try {
        removeDecisionDefinition(decisionDefinition.getId());
      } catch(Exception e) {
        LOG.removeEntryFromDeploymentCacheFailure("decision", decisionDefinition.getId(), e);
      }
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* @todo multi threaded compiling of the same class but with different roots
* for compilation... T1 compiles A, which uses B, T2 compiles B... mark A and B
* as parsed and then synchronize compilation. Problems: How to synchronize?
* How to get error messages?
*
*/
package groovy.lang;
import org.codehaus.groovy.ast.ClassHelper;
import org.codehaus.groovy.ast.ClassNode;
import org.codehaus.groovy.ast.FieldNode;
import org.codehaus.groovy.ast.InnerClassNode;
import org.codehaus.groovy.ast.ModuleNode;
import org.codehaus.groovy.ast.expr.ConstantExpression;
import org.codehaus.groovy.classgen.GeneratorContext;
import org.codehaus.groovy.classgen.Verifier;
import org.codehaus.groovy.control.*;
import org.codehaus.groovy.runtime.IOGroovyMethods;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.Opcodes;
import java.io.*;
import java.net.*;
import java.security.*;
import java.util.*;
import java.util.regex.Pattern;
/**
* A ClassLoader which can load Groovy classes. The loaded classes are cached,
* classes from other classloaders should not be cached. To be able to load a
* script that was asked for earlier but was created later it is essential not
* to keep anything like a "class not found" information for that class name.
* This includes possible parent loaders. Classes that are not cached are always
* reloaded.
*
* @author <a href="mailto:james@coredevelopers.net">James Strachan</a>
* @author Guillaume Laforge
* @author Steve Goetze
* @author Bing Ran
* @author <a href="mailto:scottstirling@rcn.com">Scott Stirling</a>
* @author <a href="mailto:blackdrag@gmx.org">Jochen Theodorou</a>
*/
public class GroovyClassLoader extends URLClassLoader {
    private static final URL[] EMPTY_URL_ARRAY = new URL[0];

    /**
     * this cache contains the loaded classes or PARSING, if the class is currently parsed
     */
    protected final Map<String, Class> classCache = new HashMap<String, Class>();

    /**
     * This cache contains mappings of file name to class. It is used
     * to bypass compilation.
     */
    protected final Map<String, Class> sourceCache = new HashMap<String, Class>();

    // compiler configuration used for every compilation started by this loader
    private final CompilerConfiguration config;
    // tri-state recompilation flag: null = defer to config, true/false = explicit override
    private Boolean recompile;
    // use 1000000 as offset to avoid conflicts with names from the GroovyShell
    private static int scriptNameCounter = 1000000;

    // Default resource loader: probes every configured script extension for a
    // matching source file. Runs inside a privileged action so the lookup works
    // under a SecurityManager; a failure for one extension just tries the next.
    private GroovyResourceLoader resourceLoader = new GroovyResourceLoader() {
        public URL loadGroovySource(final String filename) throws MalformedURLException {
            return AccessController.doPrivileged(new PrivilegedAction<URL>() {
                public URL run() {
                    for (String extension : config.getScriptExtensions()) {
                        try {
                            URL ret = getSourceFile(filename, extension);
                            if (ret != null)
                                return ret;
                        } catch (Throwable t) { // deliberately ignored: try the next extension
                        }
                    }
                    return null;
                }
            });
        }
    };
    /**
     * creates a GroovyClassLoader using the current Thread's context
     * Class loader as parent.
     */
    public GroovyClassLoader() {
        this(Thread.currentThread().getContextClassLoader());
    }

    /**
     * creates a GroovyClassLoader using the given ClassLoader as parent
     * and the default compiler configuration.
     */
    public GroovyClassLoader(ClassLoader loader) {
        this(loader, null);
    }

    /**
     * creates a GroovyClassLoader using the given GroovyClassLoader as parent.
     * This loader will get the parent's CompilerConfiguration.
     * The configuration classpath is NOT added again (the parent already has it).
     */
    public GroovyClassLoader(GroovyClassLoader parent) {
        this(parent, parent.config, false);
    }

    /**
     * creates a GroovyClassLoader.
     *
     * @param parent the parent class loader
     * @param config the compiler configuration; null selects CompilerConfiguration.DEFAULT
     * @param useConfigurationClasspath determines if the configurations classpath should be added
     */
    public GroovyClassLoader(ClassLoader parent, CompilerConfiguration config, boolean useConfigurationClasspath) {
        super(EMPTY_URL_ARRAY, parent);
        if (config == null) config = CompilerConfiguration.DEFAULT;
        this.config = config;
        if (useConfigurationClasspath) {
            for (String path : config.getClasspath()) {
                this.addClasspath(path);
            }
        }
    }

    /**
     * creates a GroovyClassLoader using the given ClassLoader as parent.
     * The configuration's classpath entries are added to this loader.
     */
    public GroovyClassLoader(ClassLoader loader, CompilerConfiguration config) {
        this(loader, config, true);
    }
public void setResourceLoader(GroovyResourceLoader resourceLoader) {
if (resourceLoader == null) {
throw new IllegalArgumentException("Resource loader must not be null!");
}
this.resourceLoader = resourceLoader;
}
public GroovyResourceLoader getResourceLoader() {
return resourceLoader;
}
    /**
     * Loads the given class node returning the implementation Class.
     * <p>
     * WARNING: this compilation is not synchronized
     *
     * @param classNode   the AST node to generate bytecode for
     * @param file        not referenced in this implementation; kept for API compatibility
     * @param newCodeBase codebase string used to build the class's CodeSource
     * @return a class
     */
    public Class defineClass(ClassNode classNode, String file, String newCodeBase) {
        CodeSource codeSource = null;
        try {
            codeSource = new CodeSource(new URL("file", "", newCodeBase), (java.security.cert.Certificate[]) null);
        } catch (MalformedURLException e) {
            //swallow: compilation simply proceeds with a null CodeSource
        }

        CompilationUnit unit = createCompilationUnit(config, codeSource);
        ClassCollector collector = createCollector(unit, classNode.getModule().getContext());
        try {
            unit.addClassNode(classNode);
            unit.setClassgenCallback(collector);
            unit.compile(Phases.CLASS_GENERATION);
            definePackageInternal(collector.generatedClass.getName());
            return collector.generatedClass;
        } catch (CompilationFailedException e) {
            // callers of this API expect an unchecked failure
            throw new RuntimeException(e);
        }
    }
    /**
     * Parses the given file into a Java class capable of being run
     *
     * @param file the file name to parse
     * @return the main class defined in the given script
     */
    public Class parseClass(File file) throws CompilationFailedException, IOException {
        return parseClass(new GroovyCodeSource(file, config.getSourceEncoding()));
    }

    /**
     * Parses the given text into a Java class capable of being run
     *
     * @param text     the text of the script/class to parse
     * @param fileName the file name to use as the name of the class
     * @return the main class defined in the given script
     */
    public Class parseClass(final String text, final String fileName) throws CompilationFailedException {
        // create the code source in a privileged block so this works when a
        // SecurityManager restricts the calling code
        GroovyCodeSource gcs = AccessController.doPrivileged(new PrivilegedAction<GroovyCodeSource>() {
            public GroovyCodeSource run() {
                return new GroovyCodeSource(text, fileName, "/groovy/script");
            }
        });
        // ad-hoc text has no stable identity, so never cache it
        gcs.setCachable(false);
        return parseClass(gcs);
    }
/**
* Parses the given text into a Java class capable of being run
*
* @param text the text of the script/class to parse
* @return the main class defined in the given script
*/
public Class parseClass(String text) throws CompilationFailedException {
return parseClass(text, "script" + System.currentTimeMillis() +
Math.abs(text.hashCode()) + ".groovy");
}
public synchronized String generateScriptName() {
scriptNameCounter++;
return "script" + scriptNameCounter + ".groovy";
}
    /**
     * Parses the content of the given input stream into a class.
     *
     * @deprecated Prefer using methods taking a Reader rather than an InputStream to avoid wrong encoding issues.
     */
    public Class parseClass(final InputStream in, final String fileName) throws CompilationFailedException {
        // For generic input streams, provide a catch-all codebase of GroovyScript
        // Security for these classes can be administered via policy grants with
        // a codebase of file:groovy.script
        GroovyCodeSource gcs = AccessController.doPrivileged(new PrivilegedAction<GroovyCodeSource>() {
            public GroovyCodeSource run() {
                try {
                    // honor the configured source encoding when one is set
                    String scriptText = config.getSourceEncoding() != null ?
                            IOGroovyMethods.getText(in, config.getSourceEncoding()) :
                            IOGroovyMethods.getText(in);
                    return new GroovyCodeSource(scriptText, fileName, "/groovy/script");
                } catch (IOException e) {
                    throw new RuntimeException("Impossible to read the content of the input stream for file named: " + fileName, e);
                }
            }
        });
        return parseClass(gcs);
    }

    /**
     * Parses the given code source, caching the result when the source
     * declares itself cachable.
     */
    public Class parseClass(GroovyCodeSource codeSource) throws CompilationFailedException {
        return parseClass(codeSource, codeSource.isCachable());
    }

    /**
     * Parses the given code source into a Java class. If there is a class file
     * for the given code source, then no parsing is done, instead the cached class is returned.
     *
     * @param shouldCacheSource if true then the generated class will be stored in the source cache
     * @return the main class defined in the given script
     */
    public Class parseClass(GroovyCodeSource codeSource, boolean shouldCacheSource) throws CompilationFailedException {
        // the whole lookup-compile-store sequence runs under the sourceCache
        // lock so the same source is never compiled twice concurrently
        synchronized (sourceCache) {
            Class answer = sourceCache.get(codeSource.getName());
            if (answer != null) return answer;
            answer = doParseClass(codeSource);
            if (shouldCacheSource) sourceCache.put(codeSource.getName(), answer);
            return answer;
        }
    }
    /**
     * Compiles the given code source and returns the resulting main class.
     * Every class generated by the compilation is added to the class cache.
     */
    private Class doParseClass(GroovyCodeSource codeSource) {
        validate(codeSource);
        Class answer;  // Was neither already loaded nor compiling, so compile and add to cache.
        CompilationUnit unit = createCompilationUnit(config, codeSource.getCodeSource());
        if (recompile!=null && recompile || recompile==null && config.getRecompileGroovySource()) {
            // recompilation support needs a timestamp field in every generated class
            unit.addFirstPhaseOperation(TimestampAdder.INSTANCE, CompilePhase.CLASS_GENERATION.getPhaseNumber());
        }
        SourceUnit su = null;
        // prefer file, then URL, then raw script text as the compilation input
        File file = codeSource.getFile();
        if (file != null) {
            su = unit.addSource(file);
        } else {
            URL url = codeSource.getURL();
            if (url != null) {
                su = unit.addSource(url);
            } else {
                su = unit.addSource(codeSource.getName(), codeSource.getScriptText());
            }
        }

        ClassCollector collector = createCollector(unit, su);
        unit.setClassgenCallback(collector);
        int goalPhase = Phases.CLASS_GENERATION;
        // when a target directory is configured, also write the class files out
        if (config != null && config.getTargetDirectory() != null) goalPhase = Phases.OUTPUT;
        unit.compile(goalPhase);

        answer = collector.generatedClass;
        String mainClass = su.getAST().getMainClassName();
        for (Object o : collector.getLoadedClasses()) {
            Class clazz = (Class) o;
            String clazzName = clazz.getName();
            definePackageInternal(clazzName);
            setClassCacheEntry(clazz);
            // the class matching the AST's main class name wins over the collector's guess
            if (clazzName.equals(mainClass)) answer = clazz;
        }
        return answer;
    }
private void validate(GroovyCodeSource codeSource) {
if (codeSource.getFile() == null) {
if (codeSource.getScriptText() == null) {
throw new IllegalArgumentException("Script text to compile cannot be null!");
}
}
}
private void definePackageInternal(String className) {
int i = className.lastIndexOf('.');
if (i != -1) {
String pkgName = className.substring(0, i);
java.lang.Package pkg = getPackage(pkgName);
if (pkg == null) {
definePackage(pkgName, null, null, null, null, null, null, null);
}
}
}
/**
* gets the currently used classpath.
*
* @return a String[] containing the file information of the urls
* @see #getURLs()
*/
protected String[] getClassPath() {
//workaround for Groovy-835
URL[] urls = getURLs();
String[] ret = new String[urls.length];
for (int i = 0; i < ret.length; i++) {
ret[i] = urls[i].getFile();
}
return ret;
}
    /**
     * Computes the permissions for the given code source. The permissions of
     * this class loader's own protection domain are merged in; any failure
     * falls back to an empty permission set. The result is always read-only.
     */
    protected PermissionCollection getPermissions(CodeSource codeSource) {
        PermissionCollection perms;
        try {
            try {
                perms = super.getPermissions(codeSource);
            } catch (SecurityException e) {
                // We lied about our CodeSource and that makes URLClassLoader unhappy.
                perms = new Permissions();
            }

            // fetch our own protection domain inside a privileged action so a
            // SecurityManager on the call stack cannot block the lookup
            ProtectionDomain myDomain = AccessController.doPrivileged(new PrivilegedAction<ProtectionDomain>() {
                public ProtectionDomain run() {
                    return getClass().getProtectionDomain();
                }
            });
            PermissionCollection myPerms = myDomain.getPermissions();
            if (myPerms != null) {
                for (Enumeration<Permission> elements = myPerms.elements(); elements.hasMoreElements();) {
                    perms.add(elements.nextElement());
                }
            }
        } catch (Throwable e) {
            // We lied about our CodeSource and that makes URLClassLoader unhappy.
            perms = new Permissions();
        }
        perms.setReadOnly();
        return perms;
    }
    /**
     * Delegating loader used for a single compilation run (see
     * createCollector()). All operations are forwarded to the delegate,
     * except that loadClass first checks classes this loader defined itself.
     */
    public static class InnerLoader extends GroovyClassLoader {
        private final GroovyClassLoader delegate;
        // creation time of this loader, reported by getTimeStamp()
        private final long timeStamp;

        public InnerLoader(GroovyClassLoader delegate) {
            super(delegate);
            this.delegate = delegate;
            timeStamp = System.currentTimeMillis();
        }

        public void addClasspath(String path) {
            delegate.addClasspath(path);
        }

        public void clearCache() {
            delegate.clearCache();
        }

        public URL findResource(String name) {
            return delegate.findResource(name);
        }

        public Enumeration findResources(String name) throws IOException {
            return delegate.findResources(name);
        }

        public Class[] getLoadedClasses() {
            return delegate.getLoadedClasses();
        }

        public URL getResource(String name) {
            return delegate.getResource(name);
        }

        public InputStream getResourceAsStream(String name) {
            return delegate.getResourceAsStream(name);
        }

        public GroovyResourceLoader getResourceLoader() {
            return delegate.getResourceLoader();
        }

        public URL[] getURLs() {
            return delegate.getURLs();
        }

        public Class loadClass(String name, boolean lookupScriptFiles, boolean preferClassOverScript, boolean resolve) throws ClassNotFoundException, CompilationFailedException {
            // classes defined by this loader itself win over the delegate's
            Class c = findLoadedClass(name);
            if (c != null) return c;
            return delegate.loadClass(name, lookupScriptFiles, preferClassOverScript, resolve);
        }

        public Class parseClass(GroovyCodeSource codeSource, boolean shouldCache) throws CompilationFailedException {
            return delegate.parseClass(codeSource, shouldCache);
        }

        public void setResourceLoader(GroovyResourceLoader resourceLoader) {
            delegate.setResourceLoader(resourceLoader);
        }

        public void addURL(URL url) {
            delegate.addURL(url);
        }

        /** Returns the time this inner loader was created. */
        public long getTimeStamp() {
            return timeStamp;
        }
    }
    /**
     * creates a new CompilationUnit. If you want to add additional
     * phase operations to the CompilationUnit (for example to inject
     * additional methods, variables, fields), then you should overwrite
     * this method.
     *
     * @param config the compiler configuration, usually the same as for this class loader
     * @param source the source containing the initial file to compile, more files may follow during compilation
     * @return the CompilationUnit
     */
    protected CompilationUnit createCompilationUnit(CompilerConfiguration config, CodeSource source) {
        return new CompilationUnit(config, source, this);
    }

    /**
     * creates a ClassCollector for a new compilation.
     *
     * @param unit the compilationUnit
     * @param su   the SourceUnit
     * @return the ClassCollector
     */
    protected ClassCollector createCollector(CompilationUnit unit, SourceUnit su) {
        // construct the InnerLoader in a privileged action, since creating a
        // class loader is a privileged operation under a SecurityManager
        InnerLoader loader = AccessController.doPrivileged(new PrivilegedAction<InnerLoader>() {
            public InnerLoader run() {
                return new InnerLoader(GroovyClassLoader.this);
            }
        });
        return new ClassCollector(loader, unit, su);
    }
    /**
     * Classgen callback that collects the classes generated during one
     * compilation run and remembers the first class generated for this
     * collector's own source unit as the "generated" (main candidate) class.
     */
    public static class ClassCollector extends CompilationUnit.ClassgenCallback {
        private Class generatedClass;        // first class of our own source unit
        private final GroovyClassLoader cl;  // loader the classes are defined in
        private final SourceUnit su;
        private final CompilationUnit unit;
        private final Collection<Class> loadedClasses;

        protected ClassCollector(InnerLoader cl, CompilationUnit unit, SourceUnit su) {
            this.cl = cl;
            this.unit = unit;
            this.loadedClasses = new ArrayList<Class>();
            this.su = su;
        }

        public GroovyClassLoader getDefiningClassLoader() {
            return cl;
        }

        protected Class createClass(byte[] code, ClassNode classNode) {
            // let a configured bytecode post-processor rewrite the bytes first
            BytecodeProcessor bytecodePostprocessor = unit.getConfiguration().getBytecodePostprocessor();
            byte[] fcode = code;
            if (bytecodePostprocessor!=null) {
                fcode = bytecodePostprocessor.processBytecode(classNode.getName(), fcode);
            }
            GroovyClassLoader cl = getDefiningClassLoader();
            Class theClass = cl.defineClass(classNode.getName(), fcode, 0, fcode.length, unit.getAST().getCodeSource());
            this.loadedClasses.add(theClass);

            if (generatedClass == null) {
                ModuleNode mn = classNode.getModule();
                SourceUnit msu = null;
                if (mn != null) msu = mn.getContext();
                ClassNode main = null;
                if (mn != null) main = (ClassNode) mn.getClasses().get(0);
                // only the first class of our own source unit qualifies
                if (msu == su && main == classNode) generatedClass = theClass;
            }

            return theClass;
        }

        protected Class onClassNode(ClassWriter classWriter, ClassNode classNode) {
            byte[] code = classWriter.toByteArray();
            return createClass(code, classNode);
        }

        public void call(ClassVisitor classWriter, ClassNode classNode) {
            onClassNode((ClassWriter) classWriter, classNode);
        }

        public Collection getLoadedClasses() {
            return this.loadedClasses;
        }
    }
    /**
     * open up the super class define that takes raw bytes
     *
     * @param name the expected binary name of the class
     * @param b    the raw class file bytes
     */
    public Class defineClass(String name, byte[] b) {
        return super.defineClass(name, b, 0, b.length);
    }

    /**
     * loads a class from a file or a parent classloader.
     * This method does call loadClass(String, boolean, boolean, boolean)
     * with the last parameter set to false.
     *
     * @throws CompilationFailedException if compilation was not successful
     */
    public Class loadClass(final String name, boolean lookupScriptFiles, boolean preferClassOverScript)
            throws ClassNotFoundException, CompilationFailedException {
        return loadClass(name, lookupScriptFiles, preferClassOverScript, false);
    }
    /**
     * gets a class from the class cache. This cache contains only classes loaded through
     * this class loader or an InnerLoader instance. If no class is stored for a
     * specific name, then the method should return null.
     *
     * @param name of the class
     * @return the class stored for the given name
     * @see #removeClassCacheEntry(String)
     * @see #setClassCacheEntry(Class)
     * @see #clearCache()
     */
    protected Class getClassCacheEntry(String name) {
        if (name == null) return null;
        // all cache access is guarded by the classCache monitor
        synchronized (classCache) {
            return classCache.get(name);
        }
    }

    /**
     * sets an entry in the class cache. The class's binary name is the key.
     *
     * @param cls the class
     * @see #removeClassCacheEntry(String)
     * @see #getClassCacheEntry(String)
     * @see #clearCache()
     */
    protected void setClassCacheEntry(Class cls) {
        synchronized (classCache) {
            classCache.put(cls.getName(), cls);
        }
    }

    /**
     * removes a class from the class cache.
     *
     * @param name of the class
     * @see #getClassCacheEntry(String)
     * @see #setClassCacheEntry(Class)
     * @see #clearCache()
     */
    protected void removeClassCacheEntry(String name) {
        synchronized (classCache) {
            classCache.remove(name);
        }
    }
    /**
     * adds a URL to the classloader.
     * Exposes the inherited (protected) implementation publicly.
     *
     * @param url the new classpath element
     */
    public void addURL(URL url) {
        super.addURL(url);
    }
/**
* Indicates if a class is recompilable. Recompilable means, that the classloader
* will try to locate a groovy source file for this class and then compile it again,
* adding the resulting class as entry to the cache. Giving null as class is like a
* recompilation, so the method should always return true here. Only classes that are
* implementing GroovyObject are compilable and only if the timestamp in the class
* is lower than Long.MAX_VALUE.
* <p>
* NOTE: First the parent loaders will be asked and only if they don't return a
* class the recompilation will happen. Recompilation also only happen if the source
* file is newer.
*
* @param cls the class to be tested. If null the method should return true
* @return true if the class should be compiled again
* @see #isSourceNewer(URL, Class)
*/
protected boolean isRecompilable(Class cls) {
if (cls == null) return true;
if (cls.getClassLoader() == this) return false;
if (recompile == null && !config.getRecompileGroovySource()) return false;
if (recompile != null && !recompile) return false;
if (!GroovyObject.class.isAssignableFrom(cls)) return false;
long timestamp = getTimeStamp(cls);
if (timestamp == Long.MAX_VALUE) return false;
return true;
}
    /**
     * sets if the recompilation should be enable. There are 3 possible
     * values for this. Any value different than null overrides the
     * value from the compiler configuration. true means to recompile if needed
     * false means to never recompile.
     *
     * @param mode the recompilation mode (null = follow the configuration)
     * @see CompilerConfiguration
     */
    public void setShouldRecompile(Boolean mode) {
        recompile = mode;
    }

    /**
     * gets the currently set recompilation mode. null means, the
     * compiler configuration is used. False means no recompilation and
     * true means that recompilation will be done if needed.
     *
     * @return the recompilation mode
     */
    public Boolean isShouldRecompile() {
        return recompile;
    }
    /**
     * loads a class from a file or a parent classloader.
     *
     * @param name                  of the class to be loaded
     * @param lookupScriptFiles     if false no lookup at files is done at all
     * @param preferClassOverScript if true the file lookup is only done if there is no class
     * @param resolve               see {@link java.lang.ClassLoader#loadClass(java.lang.String, boolean)}
     * @return the class found or the class created from a file lookup
     * @throws ClassNotFoundException     if the class could not be found
     * @throws CompilationFailedException if the source file could not be compiled
     */
    public Class loadClass(final String name, boolean lookupScriptFiles, boolean preferClassOverScript, boolean resolve)
            throws ClassNotFoundException, CompilationFailedException {
        // look into cache
        Class cls = getClassCacheEntry(name);

        // enable recompilation?
        boolean recompile = isRecompilable(cls);
        if (!recompile) return cls;

        // try parent loader
        ClassNotFoundException last = null;
        try {
            Class parentClassLoaderClass = super.loadClass(name, resolve);
            // always return if the parent loader was successful
            if (cls != parentClassLoaderClass) return parentClassLoaderClass;
        } catch (ClassNotFoundException cnfe) {
            last = cnfe;
        } catch (NoClassDefFoundError ncdfe) {
            // a "wrong name" error means a resource was found under a different
            // class name; treat it as "not found" and continue with the lookup
            if (ncdfe.getMessage().indexOf("wrong name") > 0) {
                last = new ClassNotFoundException(name);
            } else {
                throw ncdfe;
            }
        }

        // check security manager
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            String className = name.replace('/', '.');
            int i = className.lastIndexOf('.');
            // no checks on the sun.reflect classes for reflection speed-up
            // in particular ConstructorAccessorImpl, MethodAccessorImpl, FieldAccessorImpl and SerializationConstructorAccessorImpl
            // which are generated at runtime by the JDK
            if (i != -1 && !className.startsWith("sun.reflect.")) {
                sm.checkPackageAccess(className.substring(0, i));
            }
        }

        // prefer class if no recompilation
        if (cls != null && preferClassOverScript) return cls;

        // at this point the loading from a parent loader failed
        // and we want to recompile if needed.
        if (lookupScriptFiles) {
            // try groovy file
            try {
                // check if recompilation already happened.
                final Class classCacheEntry = getClassCacheEntry(name);
                if (classCacheEntry != cls) return classCacheEntry;
                URL source = resourceLoader.loadGroovySource(name);
                // if recompilation fails, we want cls==null
                Class oldClass = cls;
                cls = null;
                cls = recompile(source, name, oldClass);
            } catch (IOException ioe) {
                last = new ClassNotFoundException("IOException while opening groovy source: " + name, ioe);
            } finally {
                // keep the cache consistent with the (re)compilation outcome
                if (cls == null) {
                    removeClassCacheEntry(name);
                } else {
                    setClassCacheEntry(cls);
                }
            }
        }

        if (cls == null) {
            // no class found, there should have been an exception before now
            if (last == null) throw new AssertionError(true);
            throw last;
        }
        return cls;
    }
    /**
     * (Re)Compiles the given source.
     * This method starts the compilation of a given source, if
     * the source has changed since the class was created. For
     * this isSourceNewer is called.
     *
     * @param source    the source pointer for the compilation
     * @param className the name of the class to be generated
     * @param oldClass  a possible former class
     * @return the old class if the source wasn't new enough, the new class else
     * @throws CompilationFailedException if the compilation failed
     * @throws IOException                if the source is not readable
     * @see #isSourceNewer(URL, Class)
     */
    protected Class recompile(URL source, String className, Class oldClass) throws CompilationFailedException, IOException {
        if (source != null) {
            // found a source, compile it if newer
            if ((oldClass != null && isSourceNewer(source, oldClass)) || (oldClass == null)) {
                synchronized (sourceCache) {
                    // drop the stale cache entry so parseClass really recompiles
                    String name = source.toExternalForm();
                    sourceCache.remove(name);
                    if (isFile(source)) {
                        try {
                            return parseClass(new GroovyCodeSource(new File(source.toURI()), config.getSourceEncoding()));
                        } catch (URISyntaxException e) {
                            // do nothing and fall back to the other version
                        }
                    }
                    return parseClass(source.openStream(), name);
                }
            }
        }
        return oldClass;
    }
    /**
     * Loads a class by name without resolving it; delegates to
     * {@link #loadClass(String, boolean)}.
     */
    @Override
    public Class<?> loadClass(String name) throws ClassNotFoundException {
        return loadClass(name, false);
    }

    /**
     * Implemented here to check package access prior to returning an
     * already loaded class.
     *
     * @throws CompilationFailedException if the compilation failed
     * @throws ClassNotFoundException    if the class was not found
     * @see java.lang.ClassLoader#loadClass(java.lang.String, boolean)
     */
    protected Class loadClass(final String name, boolean resolve) throws ClassNotFoundException {
        // script lookup enabled, existing class preferred over script
        return loadClass(name, true, true, resolve);
    }
    /**
     * gets the time stamp of a given class. For groovy
     * generated classes this usually means to return the value
     * of the static field __timeStamp. If the parameter doesn't
     * have such a field, then Long.MAX_VALUE is returned
     *
     * @param cls the class
     * @return the time stamp
     */
    protected long getTimeStamp(Class cls) {
        // delegates to the Verifier, which knows the synthetic timestamp field
        return Verifier.getTimestamp(cls);
    }
/**
* This method will take a file name and try to "decode" any URL encoded characters. For example
* if the file name contains any spaces this method call will take the resulting %20 encoded values
* and convert them to spaces.
* <p>
* This method was added specifically to fix defect: Groovy-1787. The defect involved a situation
* where two scripts were sitting in a directory with spaces in its name. The code would fail
* when the class loader tried to resolve the file name and would choke on the URLEncoded space values.
*/
private String decodeFileName(String fileName) {
String decodedFile = fileName;
try {
decodedFile = URLDecoder.decode(fileName, "UTF-8");
} catch (UnsupportedEncodingException e) {
System.err.println("Encountered an invalid encoding scheme when trying to use URLDecoder.decode() inside of the GroovyClassLoader.decodeFileName() method. Returning the unencoded URL.");
System.err.println("Please note that if you encounter this error and you have spaces in your directory you will run into issues. Refer to GROOVY-1787 for description of this bug.");
}
return decodedFile;
}
private boolean isFile(URL ret) {
return ret != null && ret.getProtocol().equals("file");
}
private File getFileForUrl(URL ret, String filename) {
String fileWithoutPackage = filename;
if (fileWithoutPackage.indexOf('/') != -1) {
int index = fileWithoutPackage.lastIndexOf('/');
fileWithoutPackage = fileWithoutPackage.substring(index + 1);
}
return fileReallyExists(ret, fileWithoutPackage);
}
private File fileReallyExists(URL ret, String fileWithoutPackage) {
File path;
try {
/* fix for GROOVY-5809 */
path = new File(ret.toURI());
} catch(URISyntaxException e) {
path = new File(decodeFileName(ret.getFile()));
}
path = path.getParentFile();
if (path.exists() && path.isDirectory()) {
File file = new File(path, fileWithoutPackage);
if (file.exists()) {
// file.exists() might be case insensitive. Let's do
// case sensitive match for the filename
File parent = file.getParentFile();
for (String child : parent.list()) {
if (child.equals(fileWithoutPackage)) return file;
}
}
}
//file does not exist!
return null;
}
    /**
     * Resolves the URL of the source file for the given class name and
     * extension. A file: URL is rejected (null) unless the file really exists
     * with a case-sensitive name match.
     */
    private URL getSourceFile(String name, String extension) {
        String filename = name.replace('.', '/') + "." + extension;
        URL ret = getResource(filename);
        if (isFile(ret) && getFileForUrl(ret, filename) == null) return null;
        return ret;
    }
    /**
     * Decides if the given source is newer than a class.
     *
     * @param source the source we may want to compile
     * @param cls    the former class
     * @return true if the source is newer, false else
     * @throws IOException if it is not possible to open an
     *                     connection for the given source
     * @see #getTimeStamp(Class)
     */
    protected boolean isSourceNewer(URL source, Class cls) throws IOException {
        long lastMod;

        // Special handling for file:// protocol, as getLastModified() often reports
        // incorrect results (-1)
        if (isFile(source)) {
            // Coerce the file URL to a File
            // See ClassNodeResolver.isSourceNewer for another method that replaces '|' with ':'.
            // WTF: Why is this done and where is it documented?
            String path = source.getPath().replace('/', File.separatorChar).replace('|', ':');
            File file = new File(path);
            lastMod = file.lastModified();
        } else {
            URLConnection conn = source.openConnection();
            lastMod = conn.getLastModified();
            // close the stream to release the underlying connection
            conn.getInputStream().close();
        }
        long classTime = getTimeStamp(cls);
        // the configured minimum interval damps needless recompilation
        return classTime + config.getMinimumRecompilationInterval() < lastMod;
    }
    /**
     * adds a classpath to this classloader.
     *
     * @param path is a jar file or a directory.
     * @see #addURL(URL)
     */
    public void addClasspath(final String path) {
        AccessController.doPrivileged(new PrivilegedAction<Void>() {
            public Void run() {
                // turn the path into a URI; fall back to a file URI whenever the
                // raw string is not a usable absolute URI/URL
                URI newURI;
                try {
                    newURI = new URI(path);
                    // check if we can create a URL from that URI
                    newURI.toURL();
                } catch (URISyntaxException e) {
                    // the URI has a false format, so lets try it with files ...
                    newURI=new File(path).toURI();
                } catch (MalformedURLException e) {
                    // the URL has a false format, so lets try it with files ...
                    newURI=new File(path).toURI();
                } catch (IllegalArgumentException e) {
                    // the URL is not absolute, so lets try it with files ...
                    newURI=new File(path).toURI();
                }

                // skip duplicates, comparing via URI to avoid URL.equals
                URL[] urls = getURLs();
                for (URL url : urls) {
                    // Do not use URL.equals. It uses the network to resolve names and compares ip addresses!
                    // That is a violation of RFC and just plain evil.
                    // http://michaelscharf.blogspot.com/2006/11/javaneturlequals-and-hashcode-make.html
                    // http://docs.oracle.com/javase/7/docs/api/java/net/URL.html#equals(java.lang.Object)
                    // "Since hosts comparison requires name resolution, this operation is a blocking operation.
                    // Note: The defined behavior for equals is known to be inconsistent with virtual hosting in HTTP."
                    try {
                        if (newURI.equals(url.toURI())) return null;
                    } catch (URISyntaxException e) {
                        // fail fast! if we got a malformed URI the Classloader has to tell it
                        throw new RuntimeException( e );
                    }
                }
                try {
                    addURL(newURI.toURL());
                } catch (MalformedURLException e) {
                    // fail fast! if we got a malformed URL the Classloader has to tell it
                    throw new RuntimeException( e );
                }
                return null;
            }
        });
    }
/**
* <p>Returns all Groovy classes loaded by this class loader.
*
* @return all classes loaded by this class loader
*/
public Class[] getLoadedClasses() {
synchronized (classCache) {
final Collection<Class> values = classCache.values();
return values.toArray(new Class[values.size()]);
}
}
    /**
     * Removes all classes from the class cache.
     * <p>
     * Note: both caches are cleared, each under its own lock; the two clear
     * operations are not atomic with respect to each other.
     *
     * @see #getClassCacheEntry(String)
     * @see #setClassCacheEntry(Class)
     * @see #removeClassCacheEntry(String)
     */
    public void clearCache() {
        synchronized (classCache) {
            classCache.clear();
        }
        synchronized (sourceCache) {
            sourceCache.clear();
        }
    }
    /**
     * AST operation that injects the synthetic timestamp fields (see
     * Verifier.__TIMESTAMP) used by the recompilation machinery into every
     * primary class node that is neither an interface nor an inner class.
     */
    private static class TimestampAdder extends CompilationUnit.PrimaryClassNodeOperation implements Opcodes {
        private final static TimestampAdder INSTANCE = new TimestampAdder();

        private TimestampAdder() {}

        protected void addTimeStamp(ClassNode node) {
            if (node.getDeclaredField(Verifier.__TIMESTAMP) == null) { // in case if verifier visited the call already
                FieldNode timeTagField = new FieldNode(
                        Verifier.__TIMESTAMP,
                        ACC_PUBLIC | ACC_STATIC | ACC_SYNTHETIC,
                        ClassHelper.long_TYPE,
                        //"",
                        node,
                        new ConstantExpression(System.currentTimeMillis()));
                // alternatively, FieldNode timeTagField = SourceUnit.createFieldNode("public static final long __timeStamp = " + System.currentTimeMillis() + "L");
                timeTagField.setSynthetic(true);
                node.addField(timeTagField);

                // second field whose NAME encodes the timestamp
                timeTagField = new FieldNode(
                        Verifier.__TIMESTAMP__ + String.valueOf(System.currentTimeMillis()),
                        ACC_PUBLIC | ACC_STATIC | ACC_SYNTHETIC,
                        ClassHelper.long_TYPE,
                        //"",
                        node,
                        new ConstantExpression((long) 0));
                // alternatively, FieldNode timeTagField = SourceUnit.createFieldNode("public static final long __timeStamp = " + System.currentTimeMillis() + "L");
                timeTagField.setSynthetic(true);
                node.addField(timeTagField);
            }
        }

        @Override
        public void call(final SourceUnit source, final GeneratorContext context, final ClassNode classNode) throws CompilationFailedException {
            if ((classNode.getModifiers() & Opcodes.ACC_INTERFACE) > 0) {
                // does not apply on interfaces
                return;
            }
            if (!(classNode instanceof InnerClassNode)) {
                addTimeStamp(classNode);
            }
        }
    }
}
| |
package com.quakearts.syshub.test;
import static org.junit.Assert.*;
import static org.hamcrest.core.Is.*;
import static org.hamcrest.core.IsNull.*;
import static org.hamcrest.core.IsNot.*;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.ConcurrentModificationException;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.logging.LogManager;
import javax.enterprise.inject.spi.CDI;
import javax.transaction.UserTransaction;
import org.junit.BeforeClass;
import org.junit.Test;
import com.quakearts.appbase.Main;
import com.quakearts.appbase.spi.factory.JavaTransactionManagerSpiFactory;
import com.quakearts.syshub.SysHub;
import com.quakearts.syshub.SysHubMain;
import com.quakearts.syshub.agent.ProcessingAgent;
import com.quakearts.syshub.agent.builder.ProcessingAgentBuilder;
import com.quakearts.syshub.core.DataSpooler;
import com.quakearts.syshub.core.Message;
import com.quakearts.syshub.core.MessageFormatter;
import com.quakearts.syshub.core.Messenger;
import com.quakearts.syshub.core.Result;
import com.quakearts.syshub.core.event.ParameterEvent;
import com.quakearts.syshub.core.event.ParameterEvent.EventType;
import com.quakearts.syshub.core.event.ParameterEventBroadcaster;
import com.quakearts.syshub.core.event.ParameterEventListener;
import com.quakearts.syshub.core.factory.DataSpoolerFactory;
import com.quakearts.syshub.core.factory.MessageFormatterFactory;
import com.quakearts.syshub.core.factory.MessengerFactory;
import com.quakearts.syshub.core.impl.MessageByteImpl;
import com.quakearts.syshub.core.impl.MessageStringImpl;
import com.quakearts.syshub.core.impl.ResultImpl;
import com.quakearts.syshub.core.runner.AgentRunner;
import com.quakearts.syshub.core.runner.impl.LoopedAgentRunner;
import com.quakearts.syshub.core.runner.impl.ScheduledAgentRunner;
import com.quakearts.syshub.core.runner.impl.TriggeredAgentRunner;
import com.quakearts.syshub.core.utils.MapRowBuilder;
import com.quakearts.syshub.core.utils.Serializer;
import com.quakearts.syshub.exception.ConfigurationException;
import com.quakearts.syshub.exception.ProcessingException;
import com.quakearts.syshub.log.LoggerImpl;
import com.quakearts.syshub.model.AgentConfiguration;
import com.quakearts.syshub.model.AgentConfigurationParameter;
import com.quakearts.syshub.model.AgentConfiguration.RunType;
import com.quakearts.syshub.model.AgentConfigurationModuleMapping;
import com.quakearts.syshub.model.AgentConfigurationParameter.ParameterType;
import com.quakearts.syshub.model.AgentModule.ModuleType;
import com.quakearts.syshub.model.MaxID;
import com.quakearts.syshub.model.ProcessingLog.LogType;
import com.quakearts.syshub.model.ResultExceptionLog;
import com.quakearts.syshub.model.TransactionLog;
import com.quakearts.syshub.model.VariableCache;
import com.quakearts.syshub.model.AgentModule;
import com.quakearts.syshub.model.ProcessingLog;
import com.quakearts.syshub.test.helper.ErrorObserver;
import com.quakearts.syshub.test.helper.ErrorThrowingModule;
import com.quakearts.syshub.test.helper.ErrorThrowingModule.ExceptionLocation;
import com.quakearts.syshub.test.helper.ShutdownMonitor;
import com.quakearts.syshub.test.helper.TestAgentTrigger2;
import com.quakearts.syshub.test.helper.TestDataSpooler;
import com.quakearts.syshub.test.helper.TestFormatter1;
import com.quakearts.syshub.test.helper.TestFormatter2;
import com.quakearts.syshub.test.helper.TestMessenger1;
import com.quakearts.syshub.test.helper.TestMessenger2;
import com.quakearts.syshub.test.helper.Trigger1;
import com.quakearts.syshub.test.helper.Trigger2;
import com.quakearts.webapp.orm.DataStore;
import com.quakearts.webapp.orm.DataStoreFactory;
public class TestSysHub {
// One-time suite bootstrap: configures java.util.logging from the classpath
// and starts the embedded system so the @Test methods can resolve beans via CDI.
@BeforeClass
public static void startAndSetup() {
try {
// Load the test logging configuration bundled as a classpath resource
LogManager.getLogManager().readConfiguration(Thread.currentThread().getContextClassLoader().getResourceAsStream("logging.properties"));
} catch (SecurityException | IOException e) {
// Deliberately ignored: logging configuration is best-effort and must not block the suite
}
StartUp.getStartUp().initiateSystem();
}
/**
 * Verifies that creating and initializing a second {@link SysHubMain} after the
 * suite-wide startup is safe. If the duplicate init() were not blocked it would
 * raise a NullPointerException; any exception propagates and fails the test with
 * a full stack trace (clearer than the old catch-and-fail(e.getMessage()) pattern,
 * which discarded the trace and could even pass a null message to fail()).
 */
@Test
public void testStartingSysHubMainTwice() throws Exception {
    SysHubMain sysHubMain = new SysHubMain();
    sysHubMain.init(); // If not blocked, this would result in a NullPointerException
}
// End-to-end check: every active AgentConfiguration that the hub deployed at startup
// has a runner of the matching RunType, the runner is healthy (see validateAgentRunner),
// and calling start() on an already-running runner reports success.
@Test
public void testAgentsDeployed() throws Exception {
SysHub sysHubMain = CDI.current().select(SysHub.class).get();
assertThat(sysHubMain, is(notNullValue()));
// Three runners expected even though four configurations are active (one is not deployed)
assertThat(sysHubMain.listAgentRunners().size(), is(3));
// The configuration query below must run inside a JTA transaction
UserTransaction transaction = JavaTransactionManagerSpiFactory
.getInstance()
.getJavaTransactionManagerSpi()
.getUserTransaction();
transaction.begin();
List<AgentConfiguration> agentConfigurations;
try {
agentConfigurations = DataStoreFactory
.getInstance()
.getDataStore("system")
.find(AgentConfiguration.class)
.filterBy("active")
.withAValueEqualTo(Boolean.TRUE)
.thenList();
assertThat(agentConfigurations.size(), is(4));
// Fire the trigger so the TRIGGERED agent has run at least once before validation
Trigger1 trigger = CDI.current().select(Trigger1.class).get();
trigger.fire();
pause(2000);//give Looped and Scheduled Agents some time to run
int countDeployed=0;
for(AgentConfiguration agentConfiguration:agentConfigurations) {
if(!sysHubMain.isDeployed(agentConfiguration))
continue;
countDeployed++;
AgentRunner agentRunner = sysHubMain.fetchAgentRunner(agentConfiguration);
assertThat(agentRunner, is(notNullValue()));
// The runner implementation class and reported RunType must both match the configured type
switch (agentConfiguration.getType()) {
case LOOPED:
assertThat(agentRunner instanceof LoopedAgentRunner, is(true));
assertThat(agentRunner.getRunType(), is(RunType.LOOPED));
break;
case SCHEDULED:
assertThat(agentRunner instanceof ScheduledAgentRunner, is(true));
assertThat(agentRunner.getRunType(), is(RunType.SCHEDULED));
break;
case TRIGGERED:
assertThat(agentRunner.getRunType(), is(RunType.TRIGGERED));
assertThat(agentRunner instanceof TriggeredAgentRunner, is(true));
break;
default:
break;
}
validateAgentRunner(agentConfiguration, agentRunner);
// start() on an already running runner is expected to succeed (idempotent-style start)
assertThat(agentRunner.start(), is(true));
}
assertThat(countDeployed, is(3));
} finally {
transaction.commit();
}
}
/**
 * Asserts that a deployed runner and its {@link ProcessingAgent} are healthy, are wired
 * to the supplied {@link AgentConfiguration}, and carry the pool/worker limits expected
 * for each of the three known test agents. Also checks that every configured module
 * resolves to the expected helper implementation with correct back-references.
 *
 * Fix: the MESSENGER case of the module switch was missing its {@code break;} and
 * silently fell through into {@code default:}. Harmless today (default is empty) but a
 * trap for anyone adding a new case, so the break is now explicit.
 *
 * @param agentConfiguration the configuration the runner was deployed from
 * @param agentRunner the runner fetched from the hub for that configuration
 * @throws ConfigurationException if a module instance cannot be created by its factory
 */
private void validateAgentRunner(AgentConfiguration agentConfiguration, AgentRunner agentRunner)
        throws ConfigurationException {
    System.out.println("Validating "+agentConfiguration.getAgentName());
    // Runner health: running, not shut down, no error/restart flags
    assertThat(agentRunner.isShutDown(), is(false));
    assertThat(agentRunner.isInErrorState(), is(false));
    assertThat(agentRunner.isInRestartRequiredMode(), is(false));
    assertThat(agentRunner.isRunning(), is(true));
    ProcessingAgent agent = agentRunner.getProcessingAgent();
    //Confirm proper mapping happened
    assertThat(agent.getAgentConfiguration().getId(), is(agentConfiguration.getId()));
    assertThat(agent.getAgentConfiguration().getAgentName(), is(agentConfiguration.getAgentName()));
    assertThat(agent.getName(), is(agentConfiguration.getAgentName()));
    //Stats indicating the agent is actually working
    assertThat(agent.getStartTime(), is(notNullValue()));
    assertThat(agent.getLastRunTime(), is(notNullValue()));
    Main.log.trace(agent.getName()+" getTaskCount() "+agent.getTaskCount());
    Main.log.trace(agent.getName()+" getCompletedTaskCount() "+agent.getCompletedTaskCount());
    Main.log.trace(agent.getName()+" getAgentWorkersCreated() "+agent.getAgentWorkersCreated());
    Main.log.trace(agent.getName()+" getDataSpoolerWorkersCreated() "+agent.getDataSpoolerWorkersCreated());
    Main.log.trace(agent.getName()+" getFormatterMessengerWorkersCreated() "+agent.getFormatterMessengerWorkersCreated());
    Main.log.trace(agent.getName()+" getLargestPoolSize() "+agent.getLargestPoolSize());
    //Confirm configuration is correct
    assertThat(agent.getMessengerFormatterMapper().size()>0, is(true));
    assertThat(agent.getKeepAliveTime(), is(60L));
    assertThat(agent.getQueueSize(), is(10));
    // Per-agent expectations for spooler count, mapper size and worker/pool limits
    switch (agentConfiguration.getAgentName()) {
    case "Test Agent 1":
        assertThat(agent.getDataSpoolers().size(), is(1));
        assertThat(agent.getMessengerFormatterMapper().size(), is(1));
        assertThat(agent.getMaximumPoolSize(), is(5));
        assertThat(agent.getMaxDataSpoolerWorkers(), is(5));
        assertThat(agent.getDataSpoolerWorkersCreated()<=5, is(true));
        assertThat(agent.getMaxFormatterMessengerWorkers(), is(5));
        assertThat(agent.getFormatterMessengerWorkersCreated()<=5, is(true));
        break;
    case "Test Agent 2":
        assertThat(agent.getDataSpoolers().size(), is(2));
        assertThat(agent.getMessengerFormatterMapper().size(), is(2));
        assertThat(agent.getMaximumPoolSize(), is(8));
        assertThat(agent.getMaxDataSpoolerWorkers(), is(10));
        assertThat(agent.getDataSpoolerWorkersCreated()<=10, is(true));
        assertThat(agent.getMaxFormatterMessengerWorkers(), is(10));
        assertThat(agent.getFormatterMessengerWorkersCreated()<=10, is(true));
        break;
    case "Test Agent 3":
        assertThat(agent.getDataSpoolers().size(), is(2));
        assertThat(agent.getMessengerFormatterMapper().size(), is(1));
        assertThat(agent.getMaximumPoolSize(), is(5));
        assertThat(agent.getMaxDataSpoolerWorkers(), is(5));
        assertThat(agent.getDataSpoolerWorkersCreated()<=5, is(true));
        assertThat(agent.getMaxFormatterMessengerWorkers(), is(5));
        assertThat(agent.getFormatterMessengerWorkersCreated()<=5, is(true));
        break;
    default:
        break;
    }
    // Each configured module must resolve (via its factory) to the expected helper class,
    // and the instance must point back at this configuration and module
    for(AgentModule module:agentConfiguration.getAgentModules()) {
        switch (module.getModuleType()) {
        case DATASPOOLER:
            DataSpooler spooler = DataSpoolerFactory.getFactory().getInstance(null, module);
            assertThat(spooler instanceof TestDataSpooler, is(true));
            assertThat(spooler.getAgentConfiguration().getId(), is(agentConfiguration.getId()));
            assertThat(spooler.getAgentConfiguration().getAgentName(), is(agentConfiguration.getAgentName()));
            assertThat(spooler.getAgentModule().getId(), is(module.getId()));
            assertThat(spooler.getAgentModule().getModuleName(), is(module.getModuleName()));
            break;
        case FORMATTER:
            MessageFormatter formatter = MessageFormatterFactory.getFactory().getInstance(null, module);
            if(agentConfiguration.getAgentName().equals("Test Agent 2")) {
                if(module.getModuleName().equals("Test Module 2.3"))
                    assertThat(formatter instanceof TestFormatter1, is(true));
                else
                    assertThat(formatter instanceof TestFormatter2, is(true));
            } else if(agentConfiguration.getAgentName().equals("Test Agent 3")) {
                assertThat(formatter instanceof TestFormatter2, is(true));
            } else {
                assertThat(formatter instanceof TestFormatter1, is(true));
            }
            assertThat(formatter.getAgentConfiguration().getId(), is(agentConfiguration.getId()));
            assertThat(formatter.getAgentConfiguration().getAgentName(), is(agentConfiguration.getAgentName()));
            assertThat(formatter.getAgentModule().getId(), is(module.getId()));
            assertThat(formatter.getAgentModule().getModuleName(), is(module.getModuleName()));
            break;
        case MESSENGER:
            Messenger messenger = MessengerFactory.getFactory().getInstance(null, module);
            if(agentConfiguration.getAgentName().equals("Test Agent 2")) {
                if(module.getModuleName().equals("Test Module 2.5")) {
                    assertThat(messenger instanceof TestMessenger1, is(true));
                    Main.log.trace(agent.getName()+" Messenger sent "+((TestMessenger1)messenger).getSendCount());
                } else {
                    assertThat(messenger instanceof TestMessenger2, is(true));
                    Main.log.trace(agent.getName()+" Messenger sent "+((TestMessenger2)messenger).getSendCount());
                }
            } else if(agentConfiguration.getAgentName().equals("Test Agent 3")) {
                assertThat(messenger instanceof TestMessenger2, is(true));
                Main.log.trace(agent.getName()+" Messenger sent "+((TestMessenger2)messenger).getSendCount());
            } else {
                assertThat(messenger instanceof TestMessenger1, is(true));
                Main.log.trace(agent.getName()+" Messenger sent "+((TestMessenger1)messenger).getSendCount());
            }
            assertThat(messenger.getAgentConfiguration().getId(), is(agentConfiguration.getId()));
            assertThat(messenger.getAgentConfiguration().getAgentName(), is(agentConfiguration.getAgentName()));
            assertThat(messenger.getAgentModule().getId(), is(module.getId()));
            assertThat(messenger.getAgentModule().getModuleName(), is(module.getModuleName()));
            break; // was missing: fell through into default
        default:
            break;
        }
    }
}
/**
 * A configuration that was never handed to deployAgent() must be reported
 * as not deployed, even if it is marked active.
 */
@Test
public void testIsNotDeployed() throws Exception {
    SysHub sysHub = CDI.current().select(SysHub.class).get();
    assertThat(sysHub, is(notNullValue()));
    AgentConfiguration undeployedConfiguration = new AgentConfiguration();
    undeployedConfiguration.setActive(true);
    undeployedConfiguration.setAgentName("Test Agent 4");
    undeployedConfiguration.setType(RunType.LOOPED);
    assertThat(sysHub.isDeployed(undeployedConfiguration), is(false));
}
// Deploy/undeploy lifecycle for each RunType: after undeployAgent() the runner must stop
// (lastRunTime frozen, isRunning false, isShutDown true). The pause() calls give the
// asynchronous runners real wall-clock time to run/die; ordering here is load-bearing.
@Test
public void testUndeployAgent() throws Exception {
SysHub sysHub = CDI.current().select(SysHub.class).get();
assertThat(sysHub, is(notNullValue()));
// LOOPED: runs continuously until undeployed
AgentConfiguration agentConfiguration = createAgentConfiguration(RunType.LOOPED, "Test Agent 5");
sysHub.deployAgent(agentConfiguration);
pause(600); //give it a chance to run
AgentRunner agentRunner = sysHub.fetchAgentRunner(agentConfiguration);
ProcessingAgent agent = agentRunner.getProcessingAgent();
sysHub.undeployAgent(agentConfiguration);
assertThat(sysHub.isDeployed(agentConfiguration), is(false));
pause(100); //give it a chance to die
assertThat(agent.getLastRunTime(), is(notNullValue()));
Date lastrun = agent.getLastRunTime();
pause(600); //give it a chance to run
// lastRunTime unchanged after the wait proves the loop really stopped
assertThat(agent.getLastRunTime(), is(lastrun));
assertThat(agentRunner.isRunning(), is(false));
assertThat(agentRunner.isShutDown(), is(true));
// SCHEDULED: fires on the cron schedule until undeployed
agentConfiguration = createAgentConfiguration(RunType.SCHEDULED, "Test Agent 6");
sysHub.deployAgent(agentConfiguration);
agentRunner = sysHub.fetchAgentRunner(agentConfiguration);
agent = agentRunner.getProcessingAgent();
pause(1000); //give it a chance to run
sysHub.undeployAgent(agentConfiguration);
assertThat(sysHub.isDeployed(agentConfiguration), is(false));
pause(1000); //give it a chance to die
assertThat(agent.getLastRunTime(), is(notNullValue()));
lastrun = agent.getLastRunTime();
pause(1000); //give it a chance to run again
assertThat(agent.getLastRunTime().getTime(), is(lastrun.getTime()));
assertThat(agentRunner.isRunning(), is(false));
assertThat(agentRunner.isShutDown(), is(true));
// TRIGGERED: runs only when its trigger fires; a fire after undeploy must be ignored
agentConfiguration = createAgentConfiguration(RunType.TRIGGERED, "Test Agent 7");
sysHub.deployAgent(agentConfiguration);
pause(500);//allow agent to startup
Trigger2 trigger = CDI.current().select(Trigger2.class).get();
trigger.fire();
pause(500);//allow processing to complete
agentRunner = sysHub.fetchAgentRunner(agentConfiguration);
agent = agentRunner.getProcessingAgent();
sysHub.undeployAgent(agentConfiguration);
assertThat(sysHub.isDeployed(agentConfiguration), is(false));
assertThat(agent.getLastRunTime(), is(notNullValue()));
lastrun = agent.getLastRunTime();
pause(1000);//allow processing to die
trigger.fire();
assertThat(agent.getLastRunTime(), is(lastrun));
assertThat(agentRunner.isRunning(), is(false));
assertThat(agentRunner.isShutDown(), is(true));
// Undeploying an already-undeployed configuration must be a safe no-op
sysHub.undeployAgent(agentConfiguration);
}
/**
 * Builds an active {@link AgentConfiguration} with cron and trigger parameters plus one
 * module of each type (data spooler, formatter, messenger), each carrying one typed
 * module parameter. The module wiring was copy-pasted three times; it is now factored
 * into {@link #addAgentModule} with identical call sequence per module.
 *
 * @param runType the run type the agent should be deployed with
 * @param agentName the agent name; module names are derived as agentName+".1" etc.
 * @return the fully-populated configuration, ready for {@code deployAgent}
 */
private AgentConfiguration createAgentConfiguration(RunType runType, String agentName) {
    AgentConfiguration agentConfiguration = new AgentConfiguration();
    agentConfiguration.setActive(true);
    agentConfiguration.setAgentName(agentName);
    agentConfiguration.setType(runType);
    // Cron schedule, used when the agent runs as SCHEDULED
    AgentConfigurationParameter parameter = new AgentConfigurationParameter("testParameter1");
    parameter.setAgentConfiguration(agentConfiguration);
    parameter.setName("schedule.cron");
    parameter.setParameterType(ParameterType.CRONCONFIGURATION);
    parameter.setStringValue("* * * ? * *");
    agentConfiguration.getParameters().add(parameter);
    // Trigger class, used when the agent runs as TRIGGERED
    parameter = new AgentConfigurationParameter();
    parameter.setAgentConfiguration(agentConfiguration);
    parameter.setName("trigger.class");
    parameter.setParameterType(ParameterType.CLASS);
    parameter.setStringValue(TestAgentTrigger2.class.getName());
    agentConfiguration.getParameters().add(parameter);
    // One module of each type, each with a single typed test parameter
    AgentConfigurationParameter spoolerParameter = new AgentConfigurationParameter("testParameter1", ParameterType.BOOLEAN);
    spoolerParameter.setBooleanValue(true);
    addAgentModule(agentConfiguration, TestDataSpooler.class.getName(), agentName+".1",
            ModuleType.DATASPOOLER, spoolerParameter);
    AgentConfigurationParameter formatterParameter = new AgentConfigurationParameter("testParameter1", ParameterType.NUMERIC);
    formatterParameter.setNumericValue(20.0d);
    addAgentModule(agentConfiguration, TestFormatter1.class.getName(), agentName+".2",
            ModuleType.FORMATTER, formatterParameter);
    AgentConfigurationParameter messengerParameter = new AgentConfigurationParameter("testParameter1", ParameterType.EMAIL);
    messengerParameter.setStringValue("test@server.com");
    addAgentModule(agentConfiguration, TestMessenger1.class.getName(), agentName+".3",
            ModuleType.MESSENGER, messengerParameter);
    return agentConfiguration;
}

/**
 * Creates an {@link AgentModule}, attaches the supplied parameter to it,
 * and registers the module on the configuration.
 */
private void addAgentModule(AgentConfiguration agentConfiguration, String moduleClassName,
        String moduleName, ModuleType moduleType, AgentConfigurationParameter parameter) {
    AgentModule agentModule = new AgentModule();
    agentModule.setAgentConfiguration(agentConfiguration);
    agentModule.setModuleClassName(moduleClassName);
    agentModule.setModuleName(moduleName);
    agentModule.setModuleType(moduleType);
    parameter.setAgentConfiguration(agentConfiguration);
    parameter.setAgentModule(agentModule);
    agentModule.getParameters().add(parameter);
    agentConfiguration.getAgentModules().add(agentModule);
}
/** processData() must be rejected with a ProcessingException when the agent has no name. */
@Test(expected=ProcessingException.class)
public void testProcessDataWithNullName() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    processingAgent.setAgentConfiguration(new AgentConfiguration());
    try {
        processingAgent.processData();
    } finally {
        // Release the agent's resources even on the expected failure path
        processingAgent.shutdown();
    }
}
/** processData() must be rejected with a ProcessingException when the agent name is empty. */
@Test(expected=ProcessingException.class)
public void testProcessDataWithEmptyName() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    processingAgent.setAgentConfiguration(new AgentConfiguration());
    processingAgent.setName("");
    try {
        processingAgent.processData();
    } finally {
        // Release the agent's resources even on the expected failure path
        processingAgent.shutdown();
    }
}
/** processData() must be rejected with a ProcessingException when no configuration is set. */
@Test(expected=ProcessingException.class)
public void testProcessDataWithNullAgentConfiguration() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    try {
        processingAgent.processData();
    } finally {
        // Release the agent's resources even on the expected failure path
        processingAgent.shutdown();
    }
}
/** processData() must be rejected with a ProcessingException when data spoolers were never set. */
@Test(expected=ProcessingException.class)
public void testProcessDataWithNullDataSpoolers() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    processingAgent.setAgentConfiguration(new AgentConfiguration());
    processingAgent.setName("Name");
    try {
        processingAgent.processData();
    } finally {
        // Release the agent's resources even on the expected failure path
        processingAgent.shutdown();
    }
}
/** processData() must be rejected with a ProcessingException when the spooler list is empty. */
@Test(expected=ProcessingException.class)
public void testProcessDataWithNoDataSpoolers() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    processingAgent.setAgentConfiguration(new AgentConfiguration());
    processingAgent.setName("Name");
    processingAgent.setDataSpoolers(Collections.emptyList());
    try {
        processingAgent.processData();
    } finally {
        // Release the agent's resources even on the expected failure path
        processingAgent.shutdown();
    }
}
/** processData() must be rejected when the messenger/formatter mapper was never set. */
@Test(expected=ProcessingException.class)
public void testProcessDataWithNullMappings() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    processingAgent.setAgentConfiguration(new AgentConfiguration());
    processingAgent.setName("Name");
    processingAgent.setDataSpoolers(Arrays.asList(new TestDataSpooler()));
    try {
        processingAgent.processData();
    } finally {
        // Release the agent's resources even on the expected failure path
        processingAgent.shutdown();
    }
}
/** processData() must be rejected when the messenger/formatter mapper is empty. */
@Test(expected=ProcessingException.class)
public void testProcessDataWithNoMappings() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    processingAgent.setAgentConfiguration(new AgentConfiguration());
    processingAgent.setName("Name");
    processingAgent.setDataSpoolers(Arrays.asList(new TestDataSpooler()));
    processingAgent.setMessengerFormatterMapper(Collections.emptyMap());
    try {
        processingAgent.processData();
    } finally {
        // Release the agent's resources even on the expected failure path
        processingAgent.shutdown();
    }
}
/**
 * A spooler whose iterator throws on close() must not abort processing; the raised
 * IOException is delivered asynchronously to registered error observers instead.
 */
@Test
public void testProcessDataWithCloseableIteratorThrowingIOException() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    ErrorObserver errorObserver = CDI.current().select(ErrorObserver.class).get();
    // Single module plays spooler, formatter and messenger; it fails at CLOSE
    ErrorThrowingModule failingModule = new ErrorThrowingModule(ExceptionLocation.CLOSE, processingAgent);
    processingAgent.setDataSpoolers(Arrays.asList(failingModule));
    Map<Messenger, MessageFormatter> mapperConfiguration = new HashMap<>();
    mapperConfiguration.put(failingModule, failingModule);
    processingAgent.setMessengerFormatterMapper(mapperConfiguration);
    try {
        processingAgent.processData();
        // The error arrives asynchronously; wait up to a second for it
        assertThat(errorObserver.getQueue().poll(1, TimeUnit.SECONDS) instanceof IOException, is(true));
    } finally {
        processingAgent.shutdown();
    }
}
/**
 * A spooler that fails during prepare must report the ProcessingException
 * asynchronously to registered error observers.
 */
@Test
public void testProcessDataWithDataSpoolerPrepareThrowingProcessingException() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    ErrorObserver errorObserver = CDI.current().select(ErrorObserver.class).get();
    // Single module plays spooler, formatter and messenger; it fails at PREPARE
    ErrorThrowingModule failingModule = new ErrorThrowingModule(ExceptionLocation.PREPARE, processingAgent);
    processingAgent.setDataSpoolers(Arrays.asList(failingModule));
    Map<Messenger, MessageFormatter> mapperConfiguration = new HashMap<>();
    mapperConfiguration.put(failingModule, failingModule);
    processingAgent.setMessengerFormatterMapper(mapperConfiguration);
    try {
        processingAgent.processData();
        // The error arrives asynchronously; wait up to a second for it
        assertThat(errorObserver.getQueue().poll(1, TimeUnit.SECONDS) instanceof ProcessingException, is(true));
    } finally {
        processingAgent.shutdown();
    }
}
/**
 * A formatter that fails while formatting data must report the ProcessingException
 * asynchronously to registered error observers.
 */
@Test
public void testProcessDataWithMessageFormatterThrowingProcessingException() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    ErrorObserver errorObserver = CDI.current().select(ErrorObserver.class).get();
    // Single module plays spooler, formatter and messenger; it fails at FORMATDATA
    ErrorThrowingModule failingModule = new ErrorThrowingModule(ExceptionLocation.FORMATDATA, processingAgent);
    processingAgent.setDataSpoolers(Arrays.asList(failingModule));
    Map<Messenger, MessageFormatter> mapperConfiguration = new HashMap<>();
    mapperConfiguration.put(failingModule, failingModule);
    processingAgent.setMessengerFormatterMapper(mapperConfiguration);
    try {
        processingAgent.processData();
        // The error arrives asynchronously; wait up to a second for it
        assertThat(errorObserver.getQueue().poll(1, TimeUnit.SECONDS) instanceof ProcessingException, is(true));
    } finally {
        processingAgent.shutdown();
    }
}
/**
 * A messenger that fails while sending must report the ProcessingException
 * asynchronously to registered error observers.
 */
@Test
public void testProcessDataWithMessengerThrowingProcessingException() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    ErrorObserver errorObserver = CDI.current().select(ErrorObserver.class).get();
    // Single module plays spooler, formatter and messenger; it fails at SENDMESSAGE
    ErrorThrowingModule failingModule = new ErrorThrowingModule(ExceptionLocation.SENDMESSAGE, processingAgent);
    processingAgent.setDataSpoolers(Arrays.asList(failingModule));
    Map<Messenger, MessageFormatter> mapperConfiguration = new HashMap<>();
    mapperConfiguration.put(failingModule, failingModule);
    processingAgent.setMessengerFormatterMapper(mapperConfiguration);
    try {
        processingAgent.processData();
        // The error arrives asynchronously; wait up to a second for it
        assertThat(errorObserver.getQueue().poll(1, TimeUnit.SECONDS) instanceof ProcessingException, is(true));
    } finally {
        processingAgent.shutdown();
    }
}
/**
 * A spooler that fails during the post-send data update must report the
 * ProcessingException asynchronously to registered error observers.
 */
@Test
public void testProcessDataWithDataSpoolerUpdateThrowingProcessingException() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    ErrorObserver errorObserver = CDI.current().select(ErrorObserver.class).get();
    // Single module plays spooler, formatter and messenger; it fails at UPDATEDATA
    ErrorThrowingModule failingModule = new ErrorThrowingModule(ExceptionLocation.UPDATEDATA, processingAgent);
    processingAgent.setDataSpoolers(Arrays.asList(failingModule));
    Map<Messenger, MessageFormatter> mapperConfiguration = new HashMap<>();
    mapperConfiguration.put(failingModule, failingModule);
    processingAgent.setMessengerFormatterMapper(mapperConfiguration);
    try {
        processingAgent.processData();
        // The error arrives asynchronously; wait up to a second for it
        assertThat(errorObserver.getQueue().poll(1, TimeUnit.SECONDS) instanceof ProcessingException, is(true));
    } finally {
        processingAgent.shutdown();
    }
}
/**
 * Exercises the guard rails on the agent's pool/queue setters: out-of-range values
 * are rejected or clamped while valid values are applied. The assertion order matters
 * because core/max pool sizes constrain each other and worker limits lock after
 * processData() has run.
 */
@Test
public void testSettingInvalidProperties() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    ErrorThrowingModule benignModule = new ErrorThrowingModule(ExceptionLocation.NONE, processingAgent);
    processingAgent.setDataSpoolers(Arrays.asList(benignModule, benignModule, benignModule));
    Map<Messenger, MessageFormatter> mapperConfiguration = new HashMap<>();
    mapperConfiguration.put(benignModule, benignModule);
    processingAgent.setMessengerFormatterMapper(mapperConfiguration);
    // Core pool size: 0 and values above the maximum are rejected (stays 5)
    processingAgent.setCorePoolSize(0);
    assertThat(processingAgent.getCorePoolSize(), is(5));
    processingAgent.setCorePoolSize(10);
    assertThat(processingAgent.getCorePoolSize(), is(5));
    // Setting 2 yields 3 — apparently floored at the spooler count; NOTE(review): confirm
    processingAgent.setCorePoolSize(2);
    assertThat(processingAgent.getCorePoolSize(), is(3));
    // Maximum pool size: 0 and values below the core size are rejected; larger values apply
    processingAgent.setMaximumPoolSize(0);
    assertThat(processingAgent.getMaximumPoolSize(), is(5));
    processingAgent.setMaximumPoolSize(2);
    assertThat(processingAgent.getMaximumPoolSize(), is(5));
    processingAgent.setMaximumPoolSize(10);
    assertThat(processingAgent.getMaximumPoolSize(), is(10));
    // Queue size: 0 is rejected, a positive value applies
    processingAgent.setQueueSize(0);
    assertThat(processingAgent.getQueueSize(), is(10));
    processingAgent.setQueueSize(20);
    assertThat(processingAgent.getQueueSize(), is(20));
    // Keep-alive: 0 is rejected, a positive value applies
    processingAgent.setKeepAliveTime(0);
    assertThat(processingAgent.getKeepAliveTime(), is(60L));
    processingAgent.setKeepAliveTime(30);
    assertThat(processingAgent.getKeepAliveTime(), is(30L));
    processingAgent.processData();
    // After processing has started, worker limits can no longer be changed
    processingAgent.setMaxDataSpoolerWorkers(10);
    assertThat(processingAgent.getMaxDataSpoolerWorkers(), is(5));
    processingAgent.setMaxFormatterMessengerWorkers(10);
    assertThat(processingAgent.getMaxFormatterMessengerWorkers(), is(5));
}
/** Reprocessing must fail when the messenger/formatter mapper was never configured. */
@Test(expected=ProcessingException.class)
public void testReprocessProcessingLogWithNullMappings() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    processingAgent.setAgentConfiguration(new AgentConfiguration());
    processingAgent.setName("Name");
    processingAgent.setDataSpoolers(Arrays.asList(new TestDataSpooler()));
    processingAgent.reprocessProcessingLog(new ProcessingLog());
}
/** Reprocessing must fail when the messenger/formatter mapper is empty. */
@Test(expected=ProcessingException.class)
public void testReprocessProcessingLogWithNoMappings() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    processingAgent.setAgentConfiguration(new AgentConfiguration());
    processingAgent.setName("Name");
    processingAgent.setDataSpoolers(Arrays.asList(new TestDataSpooler()));
    processingAgent.setMessengerFormatterMapper(Collections.emptyMap());
    processingAgent.reprocessProcessingLog(new ProcessingLog());
}
/** Reprocessing a null ProcessingLog must fail even on a fully-configured agent. */
@Test(expected=ProcessingException.class)
public void testReprocessProcessingLogWithNullProcessingLog() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    processingAgent.setAgentConfiguration(new AgentConfiguration());
    processingAgent.setName("Name");
    processingAgent.setDataSpoolers(Arrays.asList(new TestDataSpooler()));
    TestMessenger1 resendCapableMessenger = new TestMessenger1();
    resendCapableMessenger.setAgentModule(new AgentModule());
    resendCapableMessenger.getAgentModule().setModuleName("Test");
    Map<Messenger, MessageFormatter> mapper = new HashMap<>();
    mapper.put(resendCapableMessenger, new TestFormatter1());
    processingAgent.setMessengerFormatterMapper(mapper);
    processingAgent.reprocessProcessingLog(null);
}
/** A ProcessingLog with no agent module attached must be rejected. */
@Test(expected=ProcessingException.class)
public void testReprocessProcessingLogWithNullAgentModule() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    processingAgent.setAgentConfiguration(new AgentConfiguration());
    processingAgent.setName("Name");
    processingAgent.setDataSpoolers(Arrays.asList(new TestDataSpooler()));
    TestMessenger1 resendCapableMessenger = new TestMessenger1();
    resendCapableMessenger.setAgentModule(new AgentModule());
    resendCapableMessenger.getAgentModule().setModuleName("Test");
    Map<Messenger, MessageFormatter> mapper = new HashMap<>();
    mapper.put(resendCapableMessenger, new TestFormatter1());
    processingAgent.setMessengerFormatterMapper(mapper);
    // Bare ProcessingLog: its agent module is never set
    processingAgent.reprocessProcessingLog(new ProcessingLog());
}
/** A ProcessingLog whose serialized message data is missing must be rejected. */
@Test(expected=ProcessingException.class)
public void testReprocessProcessingLogWithNullMessageData() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    processingAgent.setAgentConfiguration(new AgentConfiguration());
    processingAgent.setName("Name");
    processingAgent.setDataSpoolers(Arrays.asList(new TestDataSpooler()));
    TestMessenger1 resendCapableMessenger = new TestMessenger1();
    resendCapableMessenger.setAgentModule(new AgentModule());
    resendCapableMessenger.getAgentModule().setModuleName("Test");
    Map<Messenger, MessageFormatter> mapper = new HashMap<>();
    mapper.put(resendCapableMessenger, new TestFormatter1());
    processingAgent.setMessengerFormatterMapper(mapper);
    // Log is otherwise valid but setMessageData() is deliberately never called
    ProcessingLog processingLog = new ProcessingLog();
    processingLog.setLogID(1);
    processingLog.setAgentConfiguration(processingAgent.getAgentConfiguration());
    processingLog.setAgentModule(resendCapableMessenger.getAgentModule());
    processingLog.setLogDt(new Date());
    processingAgent.reprocessProcessingLog(processingLog);
}
/**
 * Happy path: a fully-populated ProcessingLog whose module name matches a
 * resend-capable messenger is reprocessed, and the message is actually re-sent.
 */
@Test
public void testReprocessProcessingLog() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    processingAgent.setAgentConfiguration(new AgentConfiguration());
    processingAgent.setName("Name");
    processingAgent.setDataSpoolers(Arrays.asList(new TestDataSpooler()));
    TestMessenger1 resendCapableMessenger = new TestMessenger1();
    resendCapableMessenger.setAgentModule(new AgentModule());
    resendCapableMessenger.getAgentModule().setModuleName("Test");
    Map<Messenger, MessageFormatter> mapper = new HashMap<>();
    mapper.put(resendCapableMessenger, new TestFormatter1());
    processingAgent.setMessengerFormatterMapper(mapper);
    // Serialize a simple string message as the stored log payload
    MessageStringImpl messageData = new MessageStringImpl();
    messageData.appendBody("Test");
    Serializer serializer = CDI.current().select(Serializer.class).get();
    ProcessingLog processingLog = new ProcessingLog();
    processingLog.setLogID(1);
    processingLog.setAgentConfiguration(processingAgent.getAgentConfiguration());
    processingLog.setAgentModule(resendCapableMessenger.getAgentModule());
    processingLog.setLogDt(new Date());
    processingLog.setMessageData(serializer.toByteArray(messageData));
    processingLog.setMid(messageData.getId());
    processingLog.setRecipient("Test");
    processingLog.setRetries(0);
    processingLog.setStatusMessage("Test");
    processingLog.setType(LogType.INFO);
    processingAgent.reprocessProcessingLog(processingLog);
    // The messenger's send counter proves the message was re-sent
    assertThat(resendCapableMessenger.getSendCount()>0, is(true));
}
/**
 * A ProcessingLog pointing at a module name that matches no configured messenger
 * must be rejected with a ProcessingException.
 */
@Test(expected=ProcessingException.class)
public void testReprocessProcessingLogWithoutMatchingModule() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    processingAgent.setAgentConfiguration(new AgentConfiguration());
    processingAgent.setName("Name");
    processingAgent.setDataSpoolers(Arrays.asList(new TestDataSpooler()));
    TestMessenger1 resendCapableMessenger = new TestMessenger1();
    resendCapableMessenger.setAgentModule(new AgentModule());
    resendCapableMessenger.getAgentModule().setModuleName("Test");
    Map<Messenger, MessageFormatter> mapper = new HashMap<>();
    mapper.put(resendCapableMessenger, new TestFormatter1());
    processingAgent.setMessengerFormatterMapper(mapper);
    ProcessingLog processingLog = new ProcessingLog();
    processingLog.setLogID(1);
    processingLog.setAgentConfiguration(processingAgent.getAgentConfiguration());
    // Module name deliberately does not match the configured messenger ("Test")
    processingLog.setAgentModule(new AgentModule());
    processingLog.getAgentModule().setModuleName("Wrong Test");
    processingLog.setLogDt(new Date());
    MessageByteImpl messageData = new MessageByteImpl();
    messageData.appendBody("Test".getBytes());
    Serializer serializer = CDI.current().select(Serializer.class).get();
    processingLog.setMessageData(serializer.toByteArray(messageData));
    processingLog.setMid(messageData.getId());
    processingLog.setRecipient("Test");
    processingLog.setRetries(0);
    processingLog.setStatusMessage("Test");
    processingLog.setType(LogType.INFO);
    processingAgent.reprocessProcessingLog(processingLog);
}
/**
 * Reprocessing must fail with a ProcessingException when the matching messenger
 * (TestMessenger2) does not support resending.
 */
@Test(expected=ProcessingException.class)
public void testReprocessProcessingLogWithNonResendCapableModule() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    processingAgent.setAgentConfiguration(new AgentConfiguration());
    processingAgent.setName("Name");
    processingAgent.setDataSpoolers(Arrays.asList(new TestDataSpooler()));
    // TestMessenger2 is the non-resend-capable variant
    TestMessenger2 nonResendMessenger = new TestMessenger2();
    nonResendMessenger.setAgentModule(new AgentModule());
    nonResendMessenger.getAgentModule().setModuleName("Test");
    Map<Messenger, MessageFormatter> mapper = new HashMap<>();
    mapper.put(nonResendMessenger, new TestFormatter1());
    processingAgent.setMessengerFormatterMapper(mapper);
    ProcessingLog processingLog = new ProcessingLog();
    processingLog.setLogID(1);
    processingLog.setAgentConfiguration(processingAgent.getAgentConfiguration());
    processingLog.setAgentModule(nonResendMessenger.getAgentModule());
    processingLog.setLogDt(new Date());
    MessageStringImpl messageData = new MessageStringImpl();
    messageData.appendBody("Test");
    Serializer serializer = CDI.current().select(Serializer.class).get();
    processingLog.setMessageData(serializer.toByteArray(messageData));
    processingLog.setMid(messageData.getId());
    processingLog.setRecipient("Test");
    processingLog.setRetries(0);
    processingLog.setStatusMessage("Test");
    processingLog.setType(LogType.INFO);
    processingAgent.reprocessProcessingLog(processingLog);
}
/** Reprocessing a result exception log must fail when the mapper was never configured. */
@Test(expected=ProcessingException.class)
public void testReprocessResultExceptionLogWithNullMappins() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    processingAgent.setAgentConfiguration(new AgentConfiguration());
    processingAgent.setName("Name");
    processingAgent.setDataSpoolers(Arrays.asList(new TestDataSpooler()));
    processingAgent.reprocessResultExceptionLog(null);
}
/**
 * Reprocessing a result exception log must fail when the messenger/formatter
 * mapper is empty. (Also removes a stray duplicate semicolon — an accidental
 * empty statement — after the reprocess call.)
 */
@Test(expected=ProcessingException.class)
public void testReprocessResultExceptionLogWithNoMappings() throws Exception {
    ProcessingAgent agent = CDI.current().select(ProcessingAgent.class).get();
    agent.setAgentConfiguration(new AgentConfiguration());
    agent.setName("Name");
    agent.setDataSpoolers(Arrays.asList(new TestDataSpooler()));
    agent.setMessengerFormatterMapper(Collections.emptyMap());
    agent.reprocessResultExceptionLog(null);
}
/** A null ResultExceptionLog must be rejected even on a fully-configured agent. */
@Test(expected=ProcessingException.class)
public void testReprocessResultExceptionLogWithNullResultExceptionLog() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    processingAgent.setAgentConfiguration(new AgentConfiguration());
    processingAgent.setName("Name");
    processingAgent.setDataSpoolers(Arrays.asList(new TestDataSpooler()));
    Map<Messenger, MessageFormatter> mapper = new HashMap<>();
    mapper.put(new TestMessenger1(), new TestFormatter1());
    processingAgent.setMessengerFormatterMapper(mapper);
    processingAgent.reprocessResultExceptionLog(null);
}
/** A ResultExceptionLog that carries result data but no exception data must be rejected. */
@Test(expected=ProcessingException.class)
public void testReprocessResultExceptionLogWithNullExceptionData() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    processingAgent.setAgentConfiguration(new AgentConfiguration());
    processingAgent.setName("Name");
    processingAgent.setDataSpoolers(Arrays.asList(new TestDataSpooler()));
    Map<Messenger, MessageFormatter> mapper = new HashMap<>();
    mapper.put(new TestMessenger1(), new TestFormatter1());
    processingAgent.setMessengerFormatterMapper(mapper);
    // Only the result half is populated; setExceptionData() is deliberately never called
    ResultExceptionLog resultExceptionLog = new ResultExceptionLog();
    Serializer serializer = CDI.current().select(Serializer.class).get();
    Result<Object> result = generateResult();
    resultExceptionLog.setResultData(serializer.toByteArray(result));
    processingAgent.reprocessResultExceptionLog(resultExceptionLog);
}
/**
 * A log entry carrying serialized exception data but no result data must
 * be rejected with a {@link ProcessingException}.
 */
@Test(expected = ProcessingException.class)
public void testReprocessResultExceptionLogWithNullResultData() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    processingAgent.setName("Name");
    processingAgent.setAgentConfiguration(new AgentConfiguration());
    processingAgent.setDataSpoolers(Arrays.asList(new TestDataSpooler()));
    Map<Messenger, MessageFormatter> formatterMapping = new HashMap<>();
    formatterMapping.put(new TestMessenger1(), new TestFormatter1());
    processingAgent.setMessengerFormatterMapper(formatterMapping);
    Serializer serializer = CDI.current().select(Serializer.class).get();
    ResultExceptionLog exceptionLog = new ResultExceptionLog();
    // Only the exception half of the payload is provided; result data stays null.
    exceptionLog.setExceptionData(serializer.toByteArray(new Exception("test")));
    processingAgent.reprocessResultExceptionLog(exceptionLog);
}
/**
 * Happy path: a fully populated {@link ResultExceptionLog} is reprocessed
 * and the mapped messenger must have delivered at least one message.
 */
@Test
public void testReprocessResultExceptionLog() throws Exception {
    ProcessingAgent processingAgent = CDI.current().select(ProcessingAgent.class).get();
    processingAgent.setName("Name");
    processingAgent.setAgentConfiguration(new AgentConfiguration());
    processingAgent.setDataSpoolers(Arrays.asList(new TestDataSpooler()));
    TestMessenger1 trackedMessenger = new TestMessenger1();
    Map<Messenger, MessageFormatter> formatterMapping = new HashMap<>();
    formatterMapping.put(trackedMessenger, new TestFormatter1());
    processingAgent.setMessengerFormatterMapper(formatterMapping);
    Serializer serializer = CDI.current().select(Serializer.class).get();
    ResultExceptionLog exceptionLog = new ResultExceptionLog();
    exceptionLog.setResultData(serializer.toByteArray(generateResult()));
    exceptionLog.setExceptionData(serializer.toByteArray(new Exception("test")));
    processingAgent.reprocessResultExceptionLog(exceptionLog);
    assertThat(trackedMessenger.getSendCount() > 0, is(true));
}
/**
 * Builds a populated {@link Result} fixture used by the reprocessing and
 * logging tests: one directly added data row and one meta-data row, a
 * shared extra row list appended to both sections, and a single property.
 */
private Result<Object> generateResult() {
    Result<Object> result = new ResultImpl();
    Map<String, Object> dataRow = MapRowBuilder.create()
            .row()
            .addColumn("agent.name", "Test")
            .thenBuild();
    Map<String, Object> metaDataRow = MapRowBuilder.create()
            .row()
            .addColumn("agent.name", "Test 2")
            .thenBuild();
    result.addRow(dataRow).addMetaDataResult(metaDataRow);
    List<Map<String, Object>> extraRows = Arrays.asList(MapRowBuilder.create()
            .row()
            .addColumn("agent.module.name", "Test")
            .thenBuild());
    result.addAllRows(extraRows);
    result.addAllMetaData(extraRows);
    result.addProperty("test", "value");
    return result;
}
/**
 * Exercises {@link ProcessingAgentBuilder} end-to-end: builds an agent with
 * global numeric parameters, one data spooler, one formatter and one
 * messenger (each with a module-scoped parameter), then verifies that the
 * parameters landed on the right modules and that every pool/queue setting
 * was applied to the built {@link ProcessingAgent}.
 */
@Test
public void testProcessingAgentBuilder() throws Exception {
    UserTransaction transaction = JavaTransactionManagerSpiFactory
            .getInstance()
            .getJavaTransactionManagerSpi()
            .getUserTransaction();
    transaction.begin();
    AgentModule agentModule1, agentModule2;
    try {
        DataStore dataStore = DataStoreFactory.getInstance().getDataStore("system");
        agentModule1 = dataStore.get(AgentModule.class, 2);
        agentModule2 = dataStore.get(AgentModule.class, 3);
        AgentConfigurationParameter parameter = new AgentConfigurationParameter();
        // NOTE(review): "numericParamter" is misspelled but matched verbatim by the
        // switch below, so it is kept as-is to avoid a cosmetic runtime-string change.
        parameter.setName("numericParamter");
        parameter.setNumericValue(1.0);
        parameter.setParameterType(ParameterType.NUMERIC);
        ProcessingAgentBuilder agentBuilder = new ProcessingAgentBuilder();
        ProcessingAgent processingAgent = agentBuilder.name("Test ProcessingAgentBuilder")
                .addConfigurationParameter(parameter)
                .maxDataSpoolerWorkers(10)
                .maxFormatterWorkers(15.0)
                .queueSize(30.0)
                .keepAliveTime(120.0)
                .corePoolSize(5.0)
                .maximumPoolSize(45.0)
                .dataSpooler(TestDataSpooler.class, "Test ProcessingAgentBuilder Module 1")
                .addBinaryParameter("test1", "value".getBytes())
                .messageFormatter(TestFormatter1.class, "Test ProcessingAgentBuilder Module 2")
                .addBooleanParameter("test2", true)
                .messenger(TestMessenger1.class, "Test ProcessingAgentBuilder Module 3")
                .addStringParameter("test3", "value", ParameterType.PASSWORD)
                .map(agentModule1).map(agentModule2)
                .build();
        // Module-scoped parameters must attach to the module declared just before them.
        AgentConfiguration agentConfiguration = agentBuilder.getConfiguration();
        for (AgentModule agentModule : agentConfiguration.getAgentModules()) {
            parameter = agentModule.getParameters().iterator().next();
            switch (agentModule.getModuleType()) {
            case DATASPOOLER:
                assertThat(parameter.getName(), is("test1"));
                assertThat(parameter.getParameterType(), is(ParameterType.BINARY));
                // Binary parameters are stored Base64-encoded ("value" -> "dmFsdWU=").
                assertThat(parameter.getStringValue(), is("dmFsdWU="));
                break;
            case FORMATTER:
                assertThat(parameter.getName(), is("test2"));
                assertThat(parameter.getParameterType(), is(ParameterType.BOOLEAN));
                assertThat(parameter.getBooleanValue(), is(true));
                break;
            case MESSENGER:
                assertThat(parameter.getName(), is("test3"));
                assertThat(parameter.getParameterType(), is(ParameterType.PASSWORD));
                assertThat(parameter.getStringValue(), is("value"));
                break;
            }
        }
        assertThat(processingAgent.getDataSpoolers().size(), is(1));
        assertThat(processingAgent.getDataSpoolers().get(0) instanceof TestDataSpooler, is(true));
        assertThat(processingAgent.getMessengerFormatterMapper().size(), is(2));
        for (Entry<Messenger, MessageFormatter> entry : processingAgent.getMessengerFormatterMapper().entrySet()) {
            assertThat(entry.getKey() instanceof TestMessenger1, is(true));
            assertThat(entry.getValue() instanceof TestFormatter1, is(true));
        }
        // 7 global parameters: the explicit one plus the six pool/queue settings.
        Set<AgentConfigurationParameter> parameters = processingAgent.getAgentConfiguration().getParameters();
        assertThat(parameters.size(), is(7));
        parameters = agentConfiguration.getParameters();
        assertThat(parameters.size(), is(7));
        // Track the six builder-generated settings so we can assert each one appeared exactly once.
        List<String> lists = new ArrayList<>(Arrays.asList("maxFormatterWorkers", "maxDataSpoolerWorkers",
                "queueSize", "keepAliveTime",
                "corePoolSize", "maximumPoolSize"));
        for (AgentConfigurationParameter agentConfigurationParameter : parameters) {
            switch (agentConfigurationParameter.getName()) {
            case "numericParamter":
                assertThat(agentConfigurationParameter.getParameterType(), is(ParameterType.NUMERIC));
                assertThat(agentConfigurationParameter.getNumericValue(), is(1.0d));
                break;
            case "maxDataSpoolerWorkers":
                assertThat(agentConfigurationParameter.getParameterType(), is(ParameterType.NUMERIC));
                assertThat(agentConfigurationParameter.getNumericValue(), is(10.0d));
                assertThat(processingAgent.getMaxDataSpoolerWorkers(), is(10));
                break;
            case "maxFormatterWorkers":
                assertThat(agentConfigurationParameter.getParameterType(), is(ParameterType.NUMERIC));
                assertThat(agentConfigurationParameter.getNumericValue(), is(15.0d));
                assertThat(processingAgent.getMaxFormatterMessengerWorkers(), is(15));
                break;
            case "queueSize":
                assertThat(agentConfigurationParameter.getParameterType(), is(ParameterType.NUMERIC));
                assertThat(agentConfigurationParameter.getNumericValue(), is(30.0d));
                assertThat(processingAgent.getQueueSize(), is(30));
                break;
            case "keepAliveTime":
                assertThat(agentConfigurationParameter.getParameterType(), is(ParameterType.NUMERIC));
                assertThat(agentConfigurationParameter.getNumericValue(), is(120.0d));
                // FIX: 120L instead of the easily misread lowercase literal 120l.
                assertThat(processingAgent.getKeepAliveTime(), is(120L));
                break;
            case "corePoolSize":
                assertThat(agentConfigurationParameter.getParameterType(), is(ParameterType.NUMERIC));
                assertThat(agentConfigurationParameter.getNumericValue(), is(5.0d));
                assertThat(processingAgent.getCorePoolSize(), is(5));
                break;
            case "maximumPoolSize":
                assertThat(agentConfigurationParameter.getParameterType(), is(ParameterType.NUMERIC));
                assertThat(agentConfigurationParameter.getNumericValue(), is(45.0d));
                assertThat(processingAgent.getMaximumPoolSize(), is(45));
                break;
            default:
                // FIX: "Uknown" -> "Unknown" in the failure diagnostic.
                fail("Unknown parameter: " + agentConfigurationParameter.getName());
                break;
            }
            lists.remove(agentConfigurationParameter.getName());
        }
        assertThat(lists.size(), is(0));
    } finally {
        transaction.commit();
    }
}
/** Building without an agent name must be rejected. */
@Test(expected = ConfigurationException.class)
public void testProcessingAgentBuilderWithNoAgentName() throws Exception {
    new ProcessingAgentBuilder().build();
}
/** Building without any data spooler must be rejected. */
@Test(expected = ConfigurationException.class)
public void testProcessingAgentBuilderWithNoDataSpoolers() throws Exception {
    new ProcessingAgentBuilder()
            .name("Test Error")
            .messageFormatter(TestFormatter1.class, "Test ProcessingAgentBuilder Module 2")
            .messenger(TestMessenger1.class, "Test ProcessingAgentBuilder Module 2")
            .build();
}
/** Building without a messenger must be rejected. */
@Test(expected = ConfigurationException.class)
public void testProcessingAgentBuilderWithNoMessenger() throws Exception {
    new ProcessingAgentBuilder()
            .name("Test Error")
            .dataSpooler(TestDataSpooler.class, "Test ProcessingAgentBuilder Module 1")
            .messageFormatter(TestFormatter1.class, "Test ProcessingAgentBuilder Module 2")
            .build();
}
/** Building without a message formatter must be rejected. */
@Test(expected = ConfigurationException.class)
public void testProcessingAgentBuilderWithNoMessageFormatter() throws Exception {
    new ProcessingAgentBuilder()
            .name("Test Error")
            .dataSpooler(TestDataSpooler.class, "Test ProcessingAgentBuilder Module 1")
            .messenger(TestMessenger1.class, "Test ProcessingAgentBuilder Module 2")
            .build();
}
/**
 * End-to-end check of {@link LoggerImpl}: one result-exception log and
 * three message logs (logged / queued / stored) are emitted, any
 * still-unpersisted entries are flushed, and the persisted rows are then
 * read back and verified inside a transaction.
 */
@Test
public void testLogging() throws Exception {
    LoggerImpl loggerImpl = LoggerImpl.getInstance();
    AgentConfiguration agentConfiguration = new AgentConfiguration();
    agentConfiguration.setId(1);
    AgentModule formatterModule = new AgentModule();
    formatterModule.setId(2);
    AgentModule messengerModule = new AgentModule();
    messengerModule.setId(3);
    Result<Object> result = generateResult();
    Exception e = new Exception("Test Error");
    // Exception log goes against the formatter module ...
    loggerImpl.logResultException(agentConfiguration,
            formatterModule,
            e, result);
    // ... while the three processing logs go against the messenger module.
    Message<String> message1 = new MessageStringImpl()
            .appendBody("Test Log");
    loggerImpl.logMessage(agentConfiguration,
            messengerModule, message1,"Test response", false);
    Message<String> message2 = new MessageStringImpl()
            .appendBody("Test Queue");
    loggerImpl.queueMessage(agentConfiguration, messengerModule,
            message2, "Test Queue");
    Message<String> message3 = new MessageStringImpl()
            .appendBody("Test Queue");
    loggerImpl.storeMessage(agentConfiguration, messengerModule,
            message3, "Test Store");
    // Give the background writer up to ~1s, then force any remaining
    // unpersisted logs to the database before querying.
    if (loggerImpl.getUnpersistedProcessingLogs().size()>0) {
        pause(1000);
        if (loggerImpl.getUnpersistedProcessingLogs().size()>0) {
            loggerImpl.pushLogsToDB();
        }
    }
    UserTransaction transaction = JavaTransactionManagerSpiFactory
            .getInstance()
            .getJavaTransactionManagerSpi()
            .getUserTransaction();
    Serializer serializer = CDI.current().select(Serializer.class).get();
    transaction.begin();
    try {
        // All three message logs must be persisted for this config/module pair.
        List<ProcessingLog> processingLogs = DataStoreFactory
                .getInstance()
                .getDataStore("system")
                .find(ProcessingLog.class)
                .filterBy("agentConfiguration")
                .withAValueEqualTo(agentConfiguration)
                .filterBy("agentModule")
                .withAValueEqualTo(messengerModule)
                .thenList();
        assertThat(processingLogs.size(), is(3));
        // Verify the INFO entry (produced by logMessage) field by field.
        for(ProcessingLog processingLog:processingLogs) {
            if(processingLog.getType() == LogType.INFO){
                assertThat(processingLog.getAgentConfiguration(), is(notNullValue()));
                assertThat(processingLog.getAgentConfiguration().getId(), is(1));
                assertThat(processingLog.getAgentModule(), is(notNullValue()));
                assertThat(processingLog.getAgentModule().getId(), is(3));
                assertThat(processingLog.getLogDt(), is(notNullValue()));
                assertThat(processingLog.getMessageData(), is(notNullValue()));
                // Message payload round-trips through the serializer.
                assertThat(((Message<?>)serializer.toObject(processingLog.getMessageData())).getId(), is(message1.getId()));
                assertThat(processingLog.getMid(), is(message1.getId()));
                assertThat(processingLog.getRecipient(), is(notNullValue()));
                assertThat(processingLog.getStatusMessage(), is("Test response"));
                break;
            }
        }
        // Exactly one exception log, recorded against the formatter module.
        List<ResultExceptionLog> exceptionLogs = DataStoreFactory
                .getInstance()
                .getDataStore("system")
                .find(ResultExceptionLog.class)
                .filterBy("agentConfiguration")
                .withAValueEqualTo(agentConfiguration)
                .filterBy("agentModule")
                .withAValueEqualTo(formatterModule)
                .thenList();
        assertThat(exceptionLogs.size(), is(1));
        assertThat(exceptionLogs.get(0).getAgentConfiguration(), is(notNullValue()));
        assertThat(exceptionLogs.get(0).getAgentConfiguration().getId(), is(1));
        assertThat(exceptionLogs.get(0).getAgentModule(), is(notNullValue()));
        assertThat(exceptionLogs.get(0).getAgentModule().getId(), is(2));
        assertThat(serializer.toObject(exceptionLogs.get(0).getResultData()), is(result));
        assertThat(((Exception)serializer.toObject(exceptionLogs.get(0).getExceptionData())).getMessage(), is(e.getMessage()));
        assertThat(exceptionLogs.get(0).getExceptionType(), is(e.getClass().getName()));
    } finally {
        transaction.commit();
    }
}
/**
 * Verifies the equals/hashCode contracts of every model entity.  For each
 * equality-relevant field the same choreography is used: set it on object 1
 * (objects differ), then on object 2 (objects equal again), interleaved
 * with reflexivity, null and foreign-type checks.  The fields exercised
 * here are therefore exactly the ones each entity's equals() considers.
 */
@Test
public void testModelEquals() throws Exception {
    // --- AgentConfiguration: equality driven by agentName ---
    assertThat(new AgentConfiguration(),
            is(new AgentConfiguration()));
    AgentConfiguration agentConfiguration1 = new AgentConfiguration();
    agentConfiguration1.setAgentName("Test");
    assertThat(agentConfiguration1, is(agentConfiguration1));
    assertThat(agentConfiguration1.hashCode(), is(agentConfiguration1.hashCode()));
    AgentConfiguration agentConfiguration2 = new AgentConfiguration();
    // NOTE(review): re-sets the name on agentConfiguration1 (a no-op) — likely a
    // copy-paste slip for agentConfiguration2.setAgentName(...).  The following
    // assertions still hold because agentConfiguration2's name is still null.
    agentConfiguration1.setAgentName("Test");
    assertThat(agentConfiguration1, is(not(agentConfiguration2)));
    assertThat(agentConfiguration1.hashCode(), is(not(agentConfiguration2.hashCode())));
    agentConfiguration2.setAgentName("Test");
    assertThat(agentConfiguration1, is(agentConfiguration2));
    assertThat(agentConfiguration1.hashCode(), is(agentConfiguration2.hashCode()));
    assertThat(agentConfiguration1.equals(null), is(false));
    assertThat(((Object)agentConfiguration1).equals(""), is(false));
    // --- AgentModule: equality driven by agentConfiguration + moduleName ---
    assertThat(new AgentModule(), is(new AgentModule()));
    AgentModule agentModule1 = new AgentModule();
    AgentModule agentModule2 = new AgentModule();
    assertThat(agentModule1, is(agentModule1));
    assertThat(agentModule1.hashCode(), is(agentModule1.hashCode()));
    agentModule1.setAgentConfiguration(agentConfiguration1);
    assertThat(agentModule1, is(not(agentModule2)));
    assertThat(agentModule1.hashCode(), is(not(agentModule2.hashCode())));
    agentModule2.setAgentConfiguration(agentConfiguration1);
    assertThat(agentModule1, is(agentModule2));
    assertThat(agentModule1.hashCode(), is(agentModule2.hashCode()));
    assertThat(agentModule1, is(agentModule1));
    assertThat(agentModule1.hashCode(), is(agentModule1.hashCode()));
    agentModule1.setModuleName("Test");
    assertThat(agentModule1, is(not(agentModule2)));
    assertThat(agentModule1.hashCode(), is(not(agentModule2.hashCode())));
    agentModule2.setModuleName("Test");
    assertThat(agentModule1, is(agentModule2));
    assertThat(agentModule1.hashCode(), is(agentModule2.hashCode()));
    assertThat(agentModule1.equals(null), is(false));
    assertThat(((Object)agentModule1).equals(""), is(false));
    // --- AgentConfigurationModuleMapping: equality driven by acid + amid ---
    assertThat(new AgentConfigurationModuleMapping(),
            is(new AgentConfigurationModuleMapping()));
    AgentConfigurationModuleMapping agentConfigurationModuleMapping1 =
            new AgentConfigurationModuleMapping();
    AgentConfigurationModuleMapping agentConfigurationModuleMapping2 =
            new AgentConfigurationModuleMapping();
    assertThat(agentConfigurationModuleMapping1, is(agentConfigurationModuleMapping1));
    assertThat(agentConfigurationModuleMapping1.hashCode(), is(agentConfigurationModuleMapping1.hashCode()));
    agentConfigurationModuleMapping1.setAcid(1);
    assertThat(agentConfigurationModuleMapping1, is(not(agentConfigurationModuleMapping2)));
    assertThat(agentConfigurationModuleMapping1.hashCode(), is(not(agentConfigurationModuleMapping2.hashCode())));
    agentConfigurationModuleMapping2.setAcid(1);
    assertThat(agentConfigurationModuleMapping1, is(agentConfigurationModuleMapping2));
    assertThat(agentConfigurationModuleMapping1.hashCode(), is(agentConfigurationModuleMapping2.hashCode()));
    assertThat(agentConfigurationModuleMapping1, is(agentConfigurationModuleMapping1));
    assertThat(agentConfigurationModuleMapping1.hashCode(), is(agentConfigurationModuleMapping1.hashCode()));
    agentConfigurationModuleMapping1.setAmid(2);
    assertThat(agentConfigurationModuleMapping1, is(not(agentConfigurationModuleMapping2)));
    assertThat(agentConfigurationModuleMapping1.hashCode(), is(not(agentConfigurationModuleMapping2.hashCode())));
    agentConfigurationModuleMapping2.setAmid(2);
    assertThat(agentConfigurationModuleMapping1, is(agentConfigurationModuleMapping2));
    assertThat(agentConfigurationModuleMapping1.hashCode(), is(agentConfigurationModuleMapping2.hashCode()));
    assertThat(agentConfigurationModuleMapping1.equals(null), is(false));
    assertThat(((Object)agentConfigurationModuleMapping1).equals(""), is(false));
    // --- AgentConfigurationParameter: agentConfiguration + agentModule + name ---
    assertThat(new AgentConfigurationParameter(),
            is(new AgentConfigurationParameter()));
    AgentConfigurationParameter agentConfigurationParameter1 =
            new AgentConfigurationParameter();
    AgentConfigurationParameter agentConfigurationParameter2 =
            new AgentConfigurationParameter();
    assertThat(agentConfigurationParameter1, is(agentConfigurationParameter1));
    assertThat(agentConfigurationParameter1.hashCode(), is(agentConfigurationParameter1.hashCode()));
    agentConfigurationParameter1.setAgentConfiguration(agentConfiguration1);
    assertThat(agentConfigurationParameter1, is(not(agentConfigurationParameter2)));
    assertThat(agentConfigurationParameter1.hashCode(), is(not(agentConfigurationParameter2.hashCode())));
    agentConfigurationParameter2.setAgentConfiguration(agentConfiguration1);
    assertThat(agentConfigurationParameter1, is(agentConfigurationParameter2));
    assertThat(agentConfigurationParameter1.hashCode(), is(agentConfigurationParameter2.hashCode()));
    assertThat(agentConfigurationParameter1, is(agentConfigurationParameter1));
    assertThat(agentConfigurationParameter1.hashCode(), is(agentConfigurationParameter1.hashCode()));
    agentConfigurationParameter1.setAgentModule(agentModule1);
    assertThat(agentConfigurationParameter1, is(not(agentConfigurationParameter2)));
    assertThat(agentConfigurationParameter1.hashCode(), is(not(agentConfigurationParameter2.hashCode())));
    agentConfigurationParameter2.setAgentModule(agentModule1);
    assertThat(agentConfigurationParameter1, is(agentConfigurationParameter2));
    assertThat(agentConfigurationParameter1.hashCode(), is(agentConfigurationParameter2.hashCode()));
    assertThat(agentConfigurationParameter1, is(agentConfigurationParameter1));
    assertThat(agentConfigurationParameter1.hashCode(), is(agentConfigurationParameter1.hashCode()));
    agentConfigurationParameter1.setName("Test");
    assertThat(agentConfigurationParameter1, is(not(agentConfigurationParameter2)));
    assertThat(agentConfigurationParameter1.hashCode(), is(not(agentConfigurationParameter2.hashCode())));
    agentConfigurationParameter2.setName("Test");
    assertThat(agentConfigurationParameter1, is(agentConfigurationParameter2));
    assertThat(agentConfigurationParameter1.hashCode(), is(agentConfigurationParameter2.hashCode()));
    assertThat(agentConfigurationParameter1.equals(null), is(false));
    assertThat(((Object)agentConfigurationParameter1).equals(""), is(false));
    // --- MaxID: equality driven by maxIDName ---
    assertThat(new MaxID(), is(new MaxID()));
    MaxID maxID1 = new MaxID();
    maxID1.setMaxIDName("Test");
    MaxID maxID2 = new MaxID();
    assertThat(maxID1, is(not(maxID2)));
    maxID2.setMaxIDName("Test");
    assertThat(maxID1, is(maxID2));
    assertThat(maxID1.equals(null), is(false));
    assertThat(((Object)maxID1).equals(""), is(false));
    // --- ProcessingLog: equality driven by mid ---
    assertThat(new ProcessingLog(), is(new ProcessingLog()));
    ProcessingLog processingLog1 = new ProcessingLog();
    processingLog1.setMid("Test");
    ProcessingLog processingLog2 = new ProcessingLog();
    assertThat(processingLog1, is(not(processingLog2)));
    processingLog2.setMid("Test");
    assertThat(processingLog1, is(processingLog2));
    assertThat(processingLog1.equals(null), is(false));
    assertThat(((Object)processingLog1).equals(""), is(false));
    // --- ResultExceptionLog: config, module, exception data/dt/type, result data ---
    assertThat(new ResultExceptionLog(), is(new ResultExceptionLog()));
    ResultExceptionLog resultExceptionLog1 = new ResultExceptionLog();
    ResultExceptionLog resultExceptionLog2 = new ResultExceptionLog();
    Date date = new Date(System.currentTimeMillis()-100000);
    assertThat(resultExceptionLog1, is(resultExceptionLog1));
    assertThat(resultExceptionLog1.hashCode(), is(resultExceptionLog1.hashCode()));
    resultExceptionLog1.setAgentConfiguration(agentConfiguration1);
    assertThat(resultExceptionLog1, is(not(resultExceptionLog2)));
    assertThat(resultExceptionLog1.hashCode(), is(not(resultExceptionLog2.hashCode())));
    resultExceptionLog2.setAgentConfiguration(agentConfiguration1);
    assertThat(resultExceptionLog1, is(resultExceptionLog2));
    assertThat(resultExceptionLog1.hashCode(), is(resultExceptionLog2.hashCode()));
    assertThat(resultExceptionLog1, is(resultExceptionLog1));
    assertThat(resultExceptionLog1.hashCode(), is(resultExceptionLog1.hashCode()));
    resultExceptionLog1.setAgentModule(agentModule1);
    assertThat(resultExceptionLog1, is(not(resultExceptionLog2)));
    assertThat(resultExceptionLog1.hashCode(), is(not(resultExceptionLog2.hashCode())));
    resultExceptionLog2.setAgentModule(agentModule1);
    assertThat(resultExceptionLog1, is(resultExceptionLog2));
    assertThat(resultExceptionLog1.hashCode(), is(resultExceptionLog2.hashCode()));
    assertThat(resultExceptionLog1, is(resultExceptionLog1));
    assertThat(resultExceptionLog1.hashCode(), is(resultExceptionLog1.hashCode()));
    resultExceptionLog1.setExceptionData("Test".getBytes());
    assertThat(resultExceptionLog1, is(not(resultExceptionLog2)));
    assertThat(resultExceptionLog1.hashCode(), is(not(resultExceptionLog2.hashCode())));
    resultExceptionLog2.setExceptionData("Test".getBytes());
    assertThat(resultExceptionLog1, is(resultExceptionLog2));
    assertThat(resultExceptionLog1.hashCode(), is(resultExceptionLog2.hashCode()));
    assertThat(resultExceptionLog1, is(resultExceptionLog1));
    assertThat(resultExceptionLog1.hashCode(), is(resultExceptionLog1.hashCode()));
    resultExceptionLog1.setExceptionDt(date);
    assertThat(resultExceptionLog1, is(not(resultExceptionLog2)));
    assertThat(resultExceptionLog1.hashCode(), is(not(resultExceptionLog2.hashCode())));
    resultExceptionLog2.setExceptionDt(date);
    assertThat(resultExceptionLog1, is(resultExceptionLog2));
    assertThat(resultExceptionLog1.hashCode(), is(resultExceptionLog2.hashCode()));
    assertThat(resultExceptionLog1, is(resultExceptionLog1));
    assertThat(resultExceptionLog1.hashCode(), is(resultExceptionLog1.hashCode()));
    resultExceptionLog1.setExceptionType("Test2");
    assertThat(resultExceptionLog1, is(not(resultExceptionLog2)));
    assertThat(resultExceptionLog1.hashCode(), is(not(resultExceptionLog2.hashCode())));
    resultExceptionLog2.setExceptionType("Test2");
    assertThat(resultExceptionLog1, is(resultExceptionLog2));
    assertThat(resultExceptionLog1.hashCode(), is(resultExceptionLog2.hashCode()));
    assertThat(resultExceptionLog1, is(resultExceptionLog1));
    assertThat(resultExceptionLog1.hashCode(), is(resultExceptionLog1.hashCode()));
    resultExceptionLog1.setResultData("Test3".getBytes());
    assertThat(resultExceptionLog1, is(not(resultExceptionLog2)));
    assertThat(resultExceptionLog1.hashCode(), is(not(resultExceptionLog2.hashCode())));
    resultExceptionLog2.setResultData("Test3".getBytes());
    assertThat(resultExceptionLog1, is(resultExceptionLog2));
    assertThat(resultExceptionLog1.hashCode(), is(resultExceptionLog2.hashCode()));
    assertThat(resultExceptionLog1.equals(null), is(false));
    assertThat(((Object)resultExceptionLog1).equals(""), is(false));
    // --- TransactionLog: action, processingLog, tranDt, username ---
    assertThat(new TransactionLog(), is(new TransactionLog()));
    TransactionLog transactionLog1 = new TransactionLog();
    TransactionLog transactionLog2 = new TransactionLog();
    assertThat(transactionLog1, is(transactionLog1));
    assertThat(transactionLog1.hashCode(), is(transactionLog1.hashCode()));
    transactionLog1.setAction("Test");
    assertThat(transactionLog1, is(not(transactionLog2)));
    assertThat(transactionLog1.hashCode(), is(not(transactionLog2.hashCode())));
    transactionLog2.setAction("Test");
    assertThat(transactionLog1, is(transactionLog2));
    assertThat(transactionLog1.hashCode(), is(transactionLog2.hashCode()));
    assertThat(transactionLog1, is(transactionLog1));
    assertThat(transactionLog1.hashCode(), is(transactionLog1.hashCode()));
    transactionLog1.setProcessingLog(processingLog1);
    assertThat(transactionLog1, is(not(transactionLog2)));
    assertThat(transactionLog1.hashCode(), is(not(transactionLog2.hashCode())));
    transactionLog2.setProcessingLog(processingLog1);
    assertThat(transactionLog1, is(transactionLog2));
    assertThat(transactionLog1.hashCode(), is(transactionLog2.hashCode()));
    assertThat(transactionLog1, is(transactionLog1));
    assertThat(transactionLog1.hashCode(), is(transactionLog1.hashCode()));
    transactionLog1.setTranDt(date);
    assertThat(transactionLog1, is(not(transactionLog2)));
    assertThat(transactionLog1.hashCode(), is(not(transactionLog2.hashCode())));
    transactionLog2.setTranDt(date);
    assertThat(transactionLog1, is(transactionLog2));
    assertThat(transactionLog1.hashCode(), is(transactionLog2.hashCode()));
    assertThat(transactionLog1, is(transactionLog1));
    assertThat(transactionLog1.hashCode(), is(transactionLog1.hashCode()));
    transactionLog1.setUsername("Name");
    assertThat(transactionLog1, is(not(transactionLog2)));
    assertThat(transactionLog1.hashCode(), is(not(transactionLog2.hashCode())));
    transactionLog2.setUsername("Name");
    assertThat(transactionLog1, is(transactionLog2));
    assertThat(transactionLog1.hashCode(), is(transactionLog2.hashCode()));
    assertThat(transactionLog1.equals(null), is(false));
    assertThat(((Object)transactionLog1).equals(""), is(false));
    // --- VariableCache: equality driven by appKey ---
    assertThat(new VariableCache(), is(new VariableCache()));
    VariableCache variableCache1 = new VariableCache();
    VariableCache variableCache2 = new VariableCache();
    assertThat(variableCache1, is(variableCache1));
    assertThat(variableCache1.hashCode(), is(variableCache1.hashCode()));
    variableCache1.setAppKey("Test");
    assertThat(variableCache1, is(not(variableCache2)));
    assertThat(variableCache1.hashCode(), is(not(variableCache2.hashCode())));
    variableCache2.setAppKey("Test");
    assertThat(variableCache1, is(variableCache2));
    assertThat(variableCache1.hashCode(), is(variableCache2.hashCode()));
    assertThat(variableCache1.equals(null), is(false));
    assertThat(((Object)variableCache1).equals(""), is(false));
}
/**
 * Blocks the calling thread for approximately {@code time} milliseconds.
 * FIX: the original implementation busy-waited on System.currentTimeMillis(),
 * burning a CPU core for the whole duration; Thread.sleep yields instead.
 * If interrupted, the interrupt flag is restored and the method returns early.
 *
 * @param time how long to wait, in milliseconds
 */
private void pause(long time) {
    try {
        Thread.sleep(time);
    } catch (InterruptedException ignored) {
        // Restore the flag so callers can still observe the interruption.
        Thread.currentThread().interrupt();
    }
}
/**
 * Deploys a looped agent with one data spooler, one formatter and one
 * messenger, undeploys it, and checks which modules were shut down.
 */
@Test
public void testShutdown() throws Exception {
    AgentConfiguration agentConfiguration = new AgentConfiguration();
    agentConfiguration.setActive(true);
    agentConfiguration.setAgentName("Test Shutdown Agent 1");
    agentConfiguration.setType(RunType.LOOPED);
    // Data spooler module.
    // NOTE(review): unlike the two modules below, this one never gets
    // setAgentConfiguration(...) — confirm whether that is intentional.
    AgentModule agentModule = new AgentModule();
    agentModule.setId(1);
    agentModule.setModuleClassName(TestDataSpooler.class.getName());
    agentModule.setModuleName("Test Module 1.1");
    agentModule.setModuleType(ModuleType.DATASPOOLER);
    agentConfiguration.getAgentModules().add(agentModule);
    // Formatter module.
    agentModule = new AgentModule();
    agentModule.setId(22);
    agentModule.setAgentConfiguration(agentConfiguration);
    agentModule.setModuleClassName(TestFormatter1.class.getName());
    agentModule.setModuleName("Test Shutdown Formatter");
    agentModule.setModuleType(ModuleType.FORMATTER);
    agentConfiguration.getAgentModules().add(agentModule);
    // Messenger module.
    // NOTE(review): reuses id 22 from the formatter module — confirm whether
    // the duplicate id is intentional (AgentModule equality appears to be
    // name/configuration based, see testModelEquals).
    agentModule = new AgentModule();
    agentModule.setId(22);
    agentModule.setAgentConfiguration(agentConfiguration);
    agentModule.setModuleClassName(TestMessenger1.class.getName());
    agentModule.setModuleName("Test Shutdown Messenger");
    agentModule.setModuleType(ModuleType.MESSENGER);
    agentConfiguration.getAgentModules().add(agentModule);
    SysHub sysHubMain = CDI.current().select(SysHub.class).get();
    sysHubMain.deployAgent(agentConfiguration);
    ProcessingAgent processingAgent = sysHubMain.fetchAgentRunner(agentConfiguration).getProcessingAgent();
    sysHubMain.undeployAgent(agentConfiguration);
    assertThat(processingAgent.getDataSpoolers().size(), is(1));
    assertThat(processingAgent.getMessengerFormatterMapper().size(), is(1));
    // After undeploy the formatter and messenger report shut down, while the
    // data spooler does not.
    // NOTE(review): confirm that data spoolers are indeed expected to stay
    // un-shut-down after undeployAgent().
    processingAgent.getDataSpoolers()
            .forEach(dataSpooler -> assertThat(((ShutdownMonitor) dataSpooler).isShutdown(), is(false)));
    processingAgent.getMessengerFormatterMapper().keySet()
            .forEach(formatter -> assertThat(((ShutdownMonitor) formatter).isShutdown(), is(true)));
    processingAgent.getMessengerFormatterMapper().values()
            .forEach(messenger -> assertThat(((ShutdownMonitor) messenger).isShutdown(), is(true)));
    // A freshly constructed agent must tolerate shutdown() without deployment.
    processingAgent = new ProcessingAgent();
    processingAgent.shutdown();
}
/**
 * Stress-tests {@link ParameterEventBroadcaster} for
 * {@link ConcurrentModificationException}: 1000 listeners are registered,
 * broadcasts run on one thread pool while unregistration runs concurrently
 * on a second pool; any CME observed by either side is captured and the
 * test fails if one occurred.
 */
@Test
public void testParameterBroadcast() throws Exception {
    AgentConfigurationParameter parameter = new AgentConfigurationParameter();
    EventType eventType = EventType.UPDATED;
    ParameterEventBroadcaster parameterEventBroadcaster = CDI.current()
            .select(ParameterEventBroadcaster.class).get();
    List<ParameterEventListener> listeners = Collections
            .synchronizedList(new ArrayList<>());
    for (int i = 0; i < 1000; i++) {
        listeners.add(e -> {});
    }
    // Declared as ExecutorService (not Executor) so the pools can be shut down
    // and awaited below; fully qualified to avoid touching the import block.
    java.util.concurrent.ExecutorService executor1 = Executors.newFixedThreadPool(4);
    java.util.concurrent.ExecutorService executor2 = Executors.newFixedThreadPool(4);
    // Captures the first CME seen on any worker thread.
    class ExceptionHolder {
        Exception e;
    }
    ExceptionHolder holder = new ExceptionHolder();
    for (int i = 0; i < 1000; i++) {
        ParameterEventListener listener = listeners.get(i);
        parameterEventBroadcaster.registerListener(listener);
        executor1.execute(() -> {
            try {
                parameterEventBroadcaster.broadcast(new ParameterEvent(parameter, eventType));
                executor2.execute(() -> {
                    try {
                        parameterEventBroadcaster.unregisterListener(listener);
                    } catch (ConcurrentModificationException e) {
                        holder.e = e;
                    }
                });
            } catch (ConcurrentModificationException e) {
                holder.e = e;
            }
        });
    }
    // FIX: the original asserted immediately, before either pool had executed
    // its tasks, so the test could pass without exercising the broadcaster and
    // also leaked both thread pools.  Drain executor1 first — it is the only
    // submitter to executor2 — then executor2; awaitTermination also gives the
    // happens-before edge needed to read holder.e safely.
    executor1.shutdown();
    assertThat(executor1.awaitTermination(30, java.util.concurrent.TimeUnit.SECONDS), is(true));
    executor2.shutdown();
    assertThat(executor2.awaitTermination(30, java.util.concurrent.TimeUnit.SECONDS), is(true));
    assertNull(holder.e);
}
}
| |
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
/*
* AttributeSelection.java
* Copyright (C) 1999 University of Waikato, Hamilton, New Zealand
*
*/
package weka.filters.supervised.attribute;
import weka.attributeSelection.ASEvaluation;
import weka.attributeSelection.ASSearch;
import weka.attributeSelection.AttributeEvaluator;
import weka.attributeSelection.AttributeTransformer;
import weka.attributeSelection.BestFirst;
import weka.attributeSelection.CfsSubsetEval;
import weka.attributeSelection.Ranker;
import weka.attributeSelection.UnsupervisedAttributeEvaluator;
import weka.attributeSelection.UnsupervisedSubsetEvaluator;
import weka.core.Capabilities;
import weka.core.FastVector;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.RevisionUtils;
import weka.core.SparseInstance;
import weka.core.Utils;
import weka.core.Capabilities.Capability;
import weka.filters.Filter;
import weka.filters.SupervisedFilter;
import java.util.Enumeration;
import java.util.Vector;
/**
<!-- globalinfo-start -->
* A supervised attribute filter that can be used to select attributes. It is very flexible and allows various search and evaluation methods to be combined.
* <p/>
<!-- globalinfo-end -->
*
<!-- options-start -->
* Valid options are: <p/>
*
* <pre> -S <"Name of search class [search options]">
* Sets search method for subset evaluators.
* eg. -S "weka.attributeSelection.BestFirst -S 8"</pre>
*
* <pre> -E <"Name of attribute/subset evaluation class [evaluator options]">
* Sets attribute/subset evaluator.
* eg. -E "weka.attributeSelection.CfsSubsetEval -L"</pre>
*
* <pre>
* Options specific to evaluator weka.attributeSelection.CfsSubsetEval:
* </pre>
*
* <pre> -M
* Treat missing values as a seperate value.</pre>
*
* <pre> -L
* Don't include locally predictive attributes.</pre>
*
* <pre>
* Options specific to search weka.attributeSelection.BestFirst:
* </pre>
*
* <pre> -P <start set>
* Specify a starting set of attributes.
* Eg. 1,3,5-7.</pre>
*
* <pre> -D <0 = backward | 1 = forward | 2 = bi-directional>
* Direction of search. (default = 1).</pre>
*
* <pre> -N <num>
* Number of non-improving nodes to
* consider before terminating search.</pre>
*
* <pre> -S <num>
* Size of lookup cache for evaluated subsets.
* Expressed as a multiple of the number of
* attributes in the data set. (default = 1)</pre>
*
<!-- options-end -->
*
* @author Mark Hall (mhall@cs.waikato.ac.nz)
* @version $Revision: 5541 $
*/
public class AttributeSelection
  extends Filter
  implements SupervisedFilter, OptionHandler {

  /** for serialization */
  static final long serialVersionUID = -296211247688169716L;

  /** performs the actual attribute selection on the first batch of data */
  private weka.attributeSelection.AttributeSelection m_trainSelector;

  /** the attribute evaluator to use */
  private ASEvaluation m_ASEvaluator;

  /** the search method if any */
  private ASSearch m_ASSearch;

  /** holds a copy of the full set of valid options passed to the filter */
  private String [] m_FilterOptions;

  /** indices of the attributes selected during the first batch;
   *  null until batchFinished() has run once */
  private int [] m_SelectedAttributes;

  /**
   * Returns a string describing this filter
   *
   * @return a description of the filter suitable for
   * displaying in the explorer/experimenter gui
   */
  public String globalInfo() {
    return "A supervised attribute filter that can be used to select "
      + "attributes. It is very flexible and allows various search "
      + "and evaluation methods to be combined.";
  }

  /**
   * Constructor. Installs the default evaluator (CfsSubsetEval) and
   * search method (BestFirst) via resetOptions().
   */
  public AttributeSelection () {
    resetOptions();
  }

  /**
   * Returns an enumeration describing the available options.
   * The list is extended dynamically with the options of the currently
   * configured evaluator and search method (when they are OptionHandlers).
   *
   * @return an enumeration of all the available options.
   */
  public Enumeration listOptions() {
    Vector newVector = new Vector(6);
    newVector.addElement(new Option(
      "\tSets search method for subset evaluators.\n"
      + "\teg. -S \"weka.attributeSelection.BestFirst -S 8\"",
      "S", 1,
      "-S <\"Name of search class [search options]\">"));
    newVector.addElement(new Option(
      "\tSets attribute/subset evaluator.\n"
      + "\teg. -E \"weka.attributeSelection.CfsSubsetEval -L\"",
      "E", 1,
      "-E <\"Name of attribute/subset evaluation class [evaluator options]\">"));
    // append the options specific to the currently selected evaluator
    if ((m_ASEvaluator != null) && (m_ASEvaluator instanceof OptionHandler)) {
      Enumeration enu = ((OptionHandler)m_ASEvaluator).listOptions();
      newVector.addElement(new Option("", "", 0, "\nOptions specific to "
        + "evaluator " + m_ASEvaluator.getClass().getName() + ":"));
      while (enu.hasMoreElements()) {
        newVector.addElement((Option)enu.nextElement());
      }
    }
    // append the options specific to the currently selected search method
    if ((m_ASSearch != null) && (m_ASSearch instanceof OptionHandler)) {
      Enumeration enu = ((OptionHandler)m_ASSearch).listOptions();
      newVector.addElement(new Option("", "", 0, "\nOptions specific to "
        + "search " + m_ASSearch.getClass().getName() + ":"));
      while (enu.hasMoreElements()) {
        newVector.addElement((Option)enu.nextElement());
      }
    }
    return newVector.elements();
  }

  /**
   * Parses a given list of options. <p/>
   *
   * Valid options are: <p/>
   *
   * -S &lt;"Name of search class [search options]"&gt; <br/>
   * Sets the search method for subset evaluators, e.g.
   * -S "weka.attributeSelection.BestFirst -S 8" <p/>
   *
   * -E &lt;"Name of attribute/subset evaluation class [evaluator options]"&gt; <br/>
   * Sets the attribute/subset evaluator, e.g.
   * -E "weka.attributeSelection.CfsSubsetEval -L" <p/>
   *
   * Options embedded in the quoted class names (for example -M to treat
   * missing values as a separate value, or -L to exclude locally predictive
   * attributes for CfsSubsetEval) are forwarded to the evaluator/search
   * method; see their documentation for details.
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {
    String optionString;
    resetOptions();
    // -X (cross-validation) is only meaningful for stand-alone attribute
    // selection, not when used as a filter
    if (Utils.getFlag('X',options)) {
      throw new Exception("Cross validation is not a valid option"
        + " when using attribute selection as a Filter.");
    }
    optionString = Utils.getOption('E',options);
    if (optionString.length() != 0) {
      optionString = optionString.trim();
      // split a quoted evaluator name from its options (if any)
      int breakLoc = optionString.indexOf(' ');
      String evalClassName = optionString;
      String evalOptionsString = "";
      String [] evalOptions=null;
      if (breakLoc != -1) {
        evalClassName = optionString.substring(0, breakLoc);
        evalOptionsString = optionString.substring(breakLoc).trim();
        evalOptions = Utils.splitOptions(evalOptionsString);
      }
      setEvaluator(ASEvaluation.forName(evalClassName, evalOptions));
    }
    // single-attribute evaluators need a ranking search; default to Ranker
    // (a subsequent -S option, if given, overrides this)
    if (m_ASEvaluator instanceof AttributeEvaluator) {
      setSearch(new Ranker());
    }
    optionString = Utils.getOption('S',options);
    if (optionString.length() != 0) {
      optionString = optionString.trim();
      // split a quoted search class name from its options (if any)
      int breakLoc = optionString.indexOf(' ');
      String SearchClassName = optionString;
      String SearchOptionsString = "";
      String [] SearchOptions=null;
      if (breakLoc != -1) {
        SearchClassName = optionString.substring(0, breakLoc);
        SearchOptionsString = optionString.substring(breakLoc).trim();
        SearchOptions = Utils.splitOptions(SearchOptionsString);
      }
      setSearch(ASSearch.forName(SearchClassName, SearchOptions));
    }
    Utils.checkForRemainingOptions(options);
  }

  /**
   * Gets the current settings for the attribute selection (search, evaluator)
   * etc.
   *
   * @return an array of strings suitable for passing to setOptions()
   */
  public String [] getOptions() {
    String [] EvaluatorOptions = new String[0];
    String [] SearchOptions = new String[0];
    int current = 0;
    if (m_ASEvaluator instanceof OptionHandler) {
      EvaluatorOptions = ((OptionHandler)m_ASEvaluator).getOptions();
    }
    if (m_ASSearch instanceof OptionHandler) {
      SearchOptions = ((OptionHandler)m_ASSearch).getOptions();
    }
    // fixed-size array; unused slots are padded with empty strings below
    String [] setOptions = new String [10];
    setOptions[current++]="-E";
    setOptions[current++]= getEvaluator().getClass().getName()
      +" "+Utils.joinOptions(EvaluatorOptions);
    setOptions[current++]="-S";
    setOptions[current++]=getSearch().getClass().getName()
      + " "+Utils.joinOptions(SearchOptions);
    while (current < setOptions.length) {
      setOptions[current++] = "";
    }
    return setOptions;
  }

  /**
   * Returns the tip text for this property
   *
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String evaluatorTipText() {
    return "Determines how attributes/attribute subsets are evaluated.";
  }

  /**
   * set attribute/subset evaluator
   *
   * @param evaluator the evaluator to use
   */
  public void setEvaluator(ASEvaluation evaluator) {
    m_ASEvaluator = evaluator;
  }

  /**
   * Returns the tip text for this property
   *
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String searchTipText() {
    return "Determines the search method.";
  }

  /**
   * Set search class
   *
   * @param search the search class to use
   */
  public void setSearch(ASSearch search) {
    m_ASSearch = search;
  }

  /**
   * Get the attribute/subset evaluator
   *
   * @return the current attribute/subset evaluator
   */
  public ASEvaluation getEvaluator() {
    return m_ASEvaluator;
  }

  /**
   * Get the search method
   *
   * @return the current search method
   */
  public ASSearch getSearch() {
    return m_ASSearch;
  }

  /**
   * Returns the Capabilities of this filter. Delegates to the configured
   * evaluator's capabilities when one is set; otherwise everything is
   * disabled.
   *
   * @return the capabilities of this object
   * @see Capabilities
   */
  public Capabilities getCapabilities() {
    Capabilities result;
    if (m_ASEvaluator == null) {
      result = super.getCapabilities();
      result.disableAll();
    }
    else {
      result = m_ASEvaluator.getCapabilities();
      // class index will be set if necessary, so we always allow the dataset
      // to have no class attribute set. see the following method:
      // weka.attributeSelection.AttributeSelection.SelectAttributes(Instances)
      result.enable(Capability.NO_CLASS);
    }
    result.setMinimumNumberInstances(0);
    return result;
  }

  /**
   * Input an instance for filtering. Ordinarily the instance is processed
   * and made available for output immediately. Some filters require all
   * instances be read before producing output.
   *
   * @param instance the input instance
   * @return true if the filtered instance may now be
   * collected with output().
   * @throws IllegalStateException if no input format has been defined.
   * @throws Exception if the input instance was not of the correct format
   * or if there was a problem with the filtering.
   */
  public boolean input(Instance instance) throws Exception {
    if (getInputFormat() == null) {
      throw new IllegalStateException("No input instance format defined");
    }
    if (m_NewBatch) {
      resetQueue();
      m_NewBatch = false;
    }
    // once the first batch has been processed the attribute selection is
    // fixed, so later instances can be converted and output immediately
    if (isOutputFormatDefined()) {
      convertInstance(instance);
      return true;
    }
    // first batch: buffer until batchFinished() runs the selection
    bufferInput(instance);
    return false;
  }

  /**
   * Signify that this batch of input to the filter is finished. If the filter
   * requires all instances prior to filtering, output() may now be called
   * to retrieve the filtered instances.
   *
   * @return true if there are instances pending output.
   * @throws IllegalStateException if no input structure has been defined.
   * @throws Exception if there is a problem during the attribute selection.
   */
  public boolean batchFinished() throws Exception {
    if (getInputFormat() == null) {
      throw new IllegalStateException("No input instance format defined");
    }
    // the selection is performed only once, on the first batch
    if (!isOutputFormatDefined()) {
      m_trainSelector.setEvaluator(m_ASEvaluator);
      m_trainSelector.setSearch(m_ASSearch);
      m_trainSelector.SelectAttributes(getInputFormat());
      //      System.out.println(m_trainSelector.toResultsString());
      m_SelectedAttributes = m_trainSelector.selectedAttributes();
      if (m_SelectedAttributes == null) {
        throw new Exception("No selected attributes\n");
      }
      setOutputFormat();
      // Convert pending input instances
      for (int i = 0; i < getInputFormat().numInstances(); i++) {
        convertInstance(getInputFormat().instance(i));
      }
      flushInput();
    }
    m_NewBatch = true;
    return (numPendingOutput() != 0);
  }

  /**
   * Set the output format. Takes the currently defined attribute set
   * m_InputFormat and calls setOutputFormat(Instances) appropriately.
   *
   * @throws Exception if something goes wrong
   */
  protected void setOutputFormat() throws Exception {
    Instances informat;
    if (m_SelectedAttributes == null) {
      setOutputFormat(null);
      return;
    }
    FastVector attributes = new FastVector(m_SelectedAttributes.length);
    int i;
    // transformers (e.g. PCA) define their own output attributes, so take
    // the header from the transformer rather than from the raw input
    if (m_ASEvaluator instanceof AttributeTransformer) {
      informat = ((AttributeTransformer)m_ASEvaluator).transformedHeader();
    } else {
      informat = getInputFormat();
    }
    for (i=0;i < m_SelectedAttributes.length;i++) {
      attributes.
        addElement(informat.attribute(m_SelectedAttributes[i]).copy());
    }
    Instances outputFormat =
      new Instances(getInputFormat().relationName(), attributes, 0);
    if (!(m_ASEvaluator instanceof UnsupervisedSubsetEvaluator) &&
        !(m_ASEvaluator instanceof UnsupervisedAttributeEvaluator)) {
      // NOTE(review): assumes supervised selection places the class
      // attribute last in m_SelectedAttributes — confirm against
      // weka.attributeSelection.AttributeSelection.selectedAttributes()
      outputFormat.setClassIndex(m_SelectedAttributes.length - 1);
    }
    setOutputFormat(outputFormat);
  }

  /**
   * Convert a single instance over. Selected attributes only are transfered.
   * The converted instance is added to the end of
   * the output queue.
   *
   * @param instance the instance to convert
   * @throws Exception if something goes wrong
   */
  protected void convertInstance(Instance instance) throws Exception {
    double[] newVals = new double[getOutputFormat().numAttributes()];
    if (m_ASEvaluator instanceof AttributeTransformer) {
      // transform first, then pick the selected (transformed) attributes
      Instance tempInstance = ((AttributeTransformer)m_ASEvaluator).
        convertInstance(instance);
      for (int i = 0; i < m_SelectedAttributes.length; i++) {
        int current = m_SelectedAttributes[i];
        newVals[i] = tempInstance.value(current);
      }
    } else {
      // plain selection: copy the chosen attribute values straight over
      for (int i = 0; i < m_SelectedAttributes.length; i++) {
        int current = m_SelectedAttributes[i];
        newVals[i] = instance.value(current);
      }
    }
    // preserve sparseness of the input representation
    if (instance instanceof SparseInstance) {
      push(new SparseInstance(instance.weight(), newVals));
    } else {
      push(new Instance(instance.weight(), newVals));
    }
  }

  /**
   * set options to their default values: CfsSubsetEval evaluator with
   * BestFirst search, and no selection performed yet
   */
  protected void resetOptions() {
    m_trainSelector = new weka.attributeSelection.AttributeSelection();
    setEvaluator(new CfsSubsetEval());
    setSearch(new BestFirst());
    m_SelectedAttributes = null;
    m_FilterOptions = null;
  }

  /**
   * Returns the revision string.
   *
   * @return the revision
   */
  public String getRevision() {
    return RevisionUtils.extract("$Revision: 5541 $");
  }

  /**
   * Main method for testing this class.
   *
   * @param argv should contain arguments to the filter: use -h for help
   */
  public static void main(String [] argv) {
    runFilter(new AttributeSelection(), argv);
  }
}
| |
// Copyright (C) 2014 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.testing;
import static com.google.common.base.Preconditions.checkArgument;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import static java.util.stream.Collectors.toSet;
import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.gerrit.server.logging.LoggingContext;
import java.lang.annotation.Annotation;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.List;
import java.util.Map;
import org.junit.runner.Runner;
import org.junit.runners.BlockJUnit4ClassRunner;
import org.junit.runners.Suite;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.InitializationError;
/**
* Suite to run tests with different {@code gerrit.config} values.
*
* <p>For each {@link Config} method in the class and base classes, a new group of tests is created
* with the {@link Parameter} field set to the config.
*
* <pre>
* {@literal @}RunWith(ConfigSuite.class)
* public abstract class MyAbstractTest {
* {@literal @}ConfigSuite.Parameter
* protected Config cfg;
*
* {@literal @}ConfigSuite.Config
 * public static Config firstConfig() {
 *   Config cfg = new Config();
 *   cfg.setString("gerrit", null, "testValue", "a");
 *   return cfg;
 * }
* }
*
* public class MyTest extends MyAbstractTest {
* {@literal @}ConfigSuite.Config
 * public static Config secondConfig() {
 *   Config cfg = new Config();
 *   cfg.setString("gerrit", null, "testValue", "b");
 *   return cfg;
 * }
*
* {@literal @}Test
* public void myTest() {
* // Test using cfg.
* }
* }
* </pre>
*
* This creates a suite of tests with three groups:
*
* <ul>
* <li><strong>default</strong>: {@code MyTest.myTest}
* <li><strong>firstConfig</strong>: {@code MyTest.myTest[firstConfig]}
* <li><strong>secondConfig</strong>: {@code MyTest.myTest[secondConfig]}
* </ul>
*
* Additionally, config values used by <strong>default</strong> can be set in a method annotated
* with {@code @ConfigSuite.Default}.
*
* <p>In addition groups of tests for different configurations can be defined by annotating a method
 * that returns a {@code Map<String, Config>} with {@link Configs}. The map keys define the test suite
* names, while the values define the configurations for the test suites.
*
* <pre>
* {@literal @}ConfigSuite.Configs
* public static Map<String, Config> configs() {
* Config cfgA = new Config();
* cfgA.setString("gerrit", null, "testValue", "a");
* Config cfgB = new Config();
* cfgB.setString("gerrit", null, "testValue", "b");
* return ImmutableMap.of("testWithValueA", cfgA, "testWithValueB", cfgB);
* }
* </pre>
*
* <p>The name of the config method corresponding to the currently-running test can be stored in a
* field annotated with {@code @ConfigSuite.Name}.
*/
public class ConfigSuite extends Suite {
  private static final String FLOGGER_BACKEND_PROPERTY = "flogger.backend_factory";
  private static final String FLOGGER_LOGGING_CONTEXT = "flogger.logging_context";

  // Configure Flogger before any test code runs; a static initializer is
  // used so the properties are set as soon as the suite class is loaded.
  static {
    System.setProperty(
        FLOGGER_BACKEND_PROPERTY,
        "com.google.common.flogger.backend.log4j.Log4jBackendFactory#getInstance");
    System.setProperty(FLOGGER_LOGGING_CONTEXT, LoggingContext.class.getName() + "#getInstance");
  }

  /** Name of the implicit test group that runs with the default config. */
  public static final String DEFAULT = "default";

  /** Marks the method that supplies the config for the default group. */
  @Target({METHOD})
  @Retention(RUNTIME)
  public static @interface Default {}

  /** Marks a method that supplies one additional named config group. */
  @Target({METHOD})
  @Retention(RUNTIME)
  public static @interface Config {}

  /** Marks a method that supplies a map of suite name to config. */
  @Target({METHOD})
  @Retention(RUNTIME)
  public static @interface Configs {}

  /** Marks the field that receives a copy of the active config. */
  @Target({FIELD})
  @Retention(RUNTIME)
  public static @interface Parameter {}

  /** Marks an optional field that receives the active config's name. */
  @Target({FIELD})
  @Retention(RUNTIME)
  public static @interface Name {}

  /**
   * Runner for a single config group: injects the config (and optionally its
   * name) into each freshly created test instance.
   */
  private static class ConfigRunner extends BlockJUnit4ClassRunner {
    private final org.eclipse.jgit.lib.Config cfg;
    private final Field parameterField;
    private final Field nameField;
    private final String name;

    private ConfigRunner(
        Class<?> clazz,
        Field parameterField,
        Field nameField,
        String name,
        org.eclipse.jgit.lib.Config cfg)
        throws InitializationError {
      super(clazz);
      this.parameterField = parameterField;
      this.nameField = nameField;
      this.name = name;
      this.cfg = cfg;
    }

    @Override
    public Object createTest() throws Exception {
      Object test = getTestClass().getJavaClass().getDeclaredConstructor().newInstance();
      // each test instance gets its own copy so tests cannot affect each other
      parameterField.set(test, new org.eclipse.jgit.lib.Config(cfg));
      if (nameField != null) {
        nameField.set(test, name);
      }
      return test;
    }

    @Override
    protected String getName() {
      // the default group has a null name; display it as "default"
      return MoreObjects.firstNonNull(name, DEFAULT);
    }

    @Override
    protected String testName(FrameworkMethod method) {
      String n = method.getName();
      // non-default groups get a "[groupName]" suffix, e.g. myTest[firstConfig]
      return name == null ? n : n + "[" + name + "]";
    }
  }

  /**
   * Builds one ConfigRunner per config group: the default group, one per
   * {@code @Config} method, and one per entry of the {@code @Configs} map.
   */
  private static List<Runner> runnersFor(Class<?> clazz) {
    Method defaultConfig = getDefaultConfig(clazz);
    List<Method> configs = getConfigs(clazz);
    Map<String, org.eclipse.jgit.lib.Config> configMap =
        callConfigMapMethod(getConfigMap(clazz), configs);
    Field parameterField = getOnlyField(clazz, Parameter.class);
    checkArgument(parameterField != null, "No @ConfigSuite.Parameter found");
    Field nameField = getOnlyField(clazz, Name.class);
    List<Runner> result = Lists.newArrayListWithCapacity(configs.size() + 1);
    try {
      // null name marks the default group (see ConfigRunner.getName())
      result.add(
          new ConfigRunner(
              clazz, parameterField, nameField, null, callConfigMethod(defaultConfig)));
      for (Method m : configs) {
        result.add(
            new ConfigRunner(clazz, parameterField, nameField, m.getName(), callConfigMethod(m)));
      }
      for (Map.Entry<String, org.eclipse.jgit.lib.Config> e : configMap.entrySet()) {
        result.add(new ConfigRunner(clazz, parameterField, nameField, e.getKey(), e.getValue()));
      }
      return result;
    } catch (InitializationError e) {
      System.err.println("Errors initializing runners:");
      for (Throwable t : e.getCauses()) {
        t.printStackTrace();
      }
      throw new RuntimeException(e);
    }
  }

  /** Returns the single {@code @Default} method, or null if there is none. */
  private static Method getDefaultConfig(Class<?> clazz) {
    return getAnnotatedMethod(clazz, Default.class);
  }

  /** Returns the single {@code @Configs} method, or null if there is none. */
  private static Method getConfigMap(Class<?> clazz) {
    return getAnnotatedMethod(clazz, Configs.class);
  }

  /**
   * Finds the at-most-one public method of {@code clazz} carrying the given
   * annotation; throws if more than one is found.
   */
  private static <T extends Annotation> Method getAnnotatedMethod(
      Class<?> clazz, Class<T> annotationClass) {
    Method result = null;
    for (Method m : clazz.getMethods()) {
      T ann = m.getAnnotation(annotationClass);
      if (ann != null) {
        checkArgument(result == null, "Multiple methods annotated with %s: %s, %s", ann, result, m);
        result = m;
      }
    }
    return result;
  }

  /**
   * Collects all public {@code @Config} methods; their names become suite
   * names, so none may be called "default".
   */
  private static List<Method> getConfigs(Class<?> clazz) {
    List<Method> result = Lists.newArrayListWithExpectedSize(3);
    for (Method m : clazz.getMethods()) {
      Config ann = m.getAnnotation(Config.class);
      if (ann != null) {
        checkArgument(!m.getName().equals(DEFAULT), "%s cannot be named %s", ann, DEFAULT);
        result.add(m);
      }
    }
    return result;
  }

  /**
   * Invokes a no-arg static Config-returning method; a null method yields an
   * empty Config (the default group when no {@code @Default} exists).
   */
  private static org.eclipse.jgit.lib.Config callConfigMethod(Method m) {
    if (m == null) {
      return new org.eclipse.jgit.lib.Config();
    }
    checkArgument(
        org.eclipse.jgit.lib.Config.class.isAssignableFrom(m.getReturnType()),
        "%s must return Config",
        m);
    checkArgument((m.getModifiers() & Modifier.STATIC) != 0, "%s must be static", m);
    checkArgument(m.getParameterTypes().length == 0, "%s must take no parameters", m);
    try {
      return (org.eclipse.jgit.lib.Config) m.invoke(null);
    } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
      throw new IllegalArgumentException(e);
    }
  }

  /**
   * Invokes the {@code @Configs} method (if any) and validates its result:
   * it must be a static no-arg method returning Map&lt;String, Config&gt;,
   * and its keys must not collide with "default" or any {@code @Config}
   * method name.
   */
  private static Map<String, org.eclipse.jgit.lib.Config> callConfigMapMethod(
      Method m, List<Method> configs) {
    if (m == null) {
      return ImmutableMap.of();
    }
    checkArgument(Map.class.isAssignableFrom(m.getReturnType()), "%s must return Map", m);
    // check the declared generic type arguments of the returned Map
    Type[] types = ((ParameterizedType) m.getGenericReturnType()).getActualTypeArguments();
    checkArgument(
        String.class.isAssignableFrom((Class<?>) types[0]),
        "The map returned by %s must have String as key",
        m);
    checkArgument(
        org.eclipse.jgit.lib.Config.class.isAssignableFrom((Class<?>) types[1]),
        "The map returned by %s must have Config as value",
        m);
    checkArgument((m.getModifiers() & Modifier.STATIC) != 0, "%s must be static", m);
    checkArgument(m.getParameterTypes().length == 0, "%s must take no parameters", m);
    try {
      @SuppressWarnings("unchecked")
      Map<String, org.eclipse.jgit.lib.Config> configMap =
          (Map<String, org.eclipse.jgit.lib.Config>) m.invoke(null);
      checkArgument(
          !configMap.containsKey(DEFAULT),
          "The map returned by %s cannot contain key %s (duplicate test suite name)",
          m,
          DEFAULT);
      for (String name : configs.stream().map(Method::getName).collect(toSet())) {
        checkArgument(
            !configMap.containsKey(name),
            "The map returned by %s cannot contain key %s (duplicate test suite name)",
            m,
            name);
      }
      return configMap;
    } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
      throw new IllegalArgumentException(e);
    }
  }

  /**
   * Finds the at-most-one public field of {@code clazz} carrying the given
   * annotation; returns null when absent, throws when duplicated.
   */
  private static Field getOnlyField(Class<?> clazz, Class<? extends Annotation> ann) {
    List<Field> fields = Lists.newArrayListWithExpectedSize(1);
    for (Field f : clazz.getFields()) {
      if (f.getAnnotation(ann) != null) {
        fields.add(f);
      }
    }
    checkArgument(
        fields.size() <= 1,
        "expected 1 @ConfigSuite.%s field, found: %s",
        ann.getSimpleName(),
        fields);
    return Iterables.getFirst(fields, null);
  }

  public ConfigSuite(Class<?> clazz) throws InitializationError {
    super(clazz, runnersFor(clazz));
  }
}
| |
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*/
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.thingml.networkplugins.c.posix;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.thingml.compilers.Context;
import org.thingml.compilers.c.CCompilerContext;
import org.thingml.compilers.spi.NetworkPlugin;
import org.thingml.compilers.spi.SerializationPlugin;
import org.thingml.xtext.helpers.AnnotatedElementHelper;
import org.thingml.xtext.thingML.Configuration;
import org.thingml.xtext.thingML.ExternalConnector;
import org.thingml.xtext.thingML.Message;
import org.thingml.xtext.thingML.Port;
import org.thingml.xtext.thingML.Protocol;
import org.thingml.xtext.thingML.Thing;
/**
*
* @author sintef
*/
/**
 * Network plugin that generates POSIX C code for UDP-based external
 * connectors. For each UDP protocol in the configuration it instantiates the
 * C/H templates, wires up a threaded receiver, and emits message
 * forwarders/parsers.
 *
 * @author sintef
 */
public class PosixUDPPlugin extends NetworkPlugin {
    /** Compiler context; set when generateNetworkLibrary() is invoked. */
    CCompilerContext ctx;

    public String getPluginID() {
        return "PosixUDPPlugin";
    }

    /** @return the protocol names handled by this plugin (case variants). */
    public List<String> getSupportedProtocols() {
        List<String> res = new ArrayList<>();
        res.add("UDP");
        res.add("udp");
        return res;
    }

    /** @return the compiler targets this plugin can generate code for. */
    public List<String> getTargetedLanguages() {
        List<String> res = new ArrayList<>();
        res.add("posix");
        res.add("posixmt");
        return res;
    }

    /**
     * Generates the UDP network library for every matching protocol of the
     * configuration. Aborts (after logging) if no serialization plugin can be
     * resolved for a protocol.
     */
    public void generateNetworkLibrary(Configuration cfg, Context ctx, Set<Protocol> protocols) {
        this.ctx = (CCompilerContext) ctx;
        for (Protocol prot : protocols) {
            UDPPort port = new UDPPort();
            port.protocol = prot;
            try {
                port.sp = ctx.getSerializationPlugin(prot);
            } catch (UnsupportedEncodingException uee) {
                System.err.println("Could not get serialization plugin... Expect some errors in the generated code");
                uee.printStackTrace();
                return;
            }
            for (ExternalConnector eco : this.getExternalConnectors(cfg, prot)) {
                port.ecos.add(eco);
                // NOTE(review): renames the connector after its protocol —
                // side effect on the model; confirm downstream code relies on it.
                eco.setName(eco.getProtocol().getName());
            }
            port.generateNetworkLibrary(this.ctx, cfg);
        }
    }

    /**
     * Per-protocol generation state: the external connectors bound to the
     * protocol, the messages it receives, and the serialization plugin used
     * to encode/decode them.
     */
    private class UDPPort {
        Set<ExternalConnector> ecos;
        Protocol protocol;
        Set<Message> messages;
        SerializationPlugin sp;
        // names of the generated forwarder parameters carrying the target
        // IP/port; only set when @udp_target_selection is used
        String paramPort, paramIP;

        UDPPort() {
            ecos = new HashSet<>();
            messages = new HashSet<>();
        }

        /**
         * Appends one C forwarder function per message sent over the
         * protocol. With @udp_target_selection the forwarder also passes the
         * caller-supplied destination IP/port.
         *
         * NOTE(review): if @udp_target_selection is present but
         * @udp_param_ip/@udp_param_port are missing, paramIP/paramPort stay
         * null and the literal "null" is emitted into the generated C —
         * confirm annotations are validated upstream.
         */
        public void generateMessageForwarders(StringBuilder builder, StringBuilder headerbuilder, Configuration cfg, Protocol prot) {
            for (ThingPortMessage tpm : getMessagesSent(cfg, prot)) {
                Thing t = tpm.t;
                Port p = tpm.p;
                Message m = tpm.m;
                // fetch a fresh serialization plugin per message (plugins may be stateful)
                SerializationPlugin sp = null;
                try {
                    sp = ctx.getSerializationPlugin(prot);
                } catch (UnsupportedEncodingException uee) {
                    System.err.println("Could not get serialization plugin... Expect some errors in the generated code");
                    uee.printStackTrace();
                    return;
                }
                builder.append("// Forwarding of messages " + prot.getName() + "::" + t.getName() + "::" + p.getName() + "::" + m.getName() + "\n");
                builder.append("void forward_" + prot.getName() + "_" + ctx.getSenderName(t, p, m));
                ctx.appendFormalParameters(t, builder, m);
                builder.append("{\n");
                // i holds the C expression for the serialized length
                String i = sp.generateSerialization(builder, "forward_buf", m);
                builder.append("\n//Forwarding with specified function \n");
                if(AnnotatedElementHelper.hasAnnotation(protocol, "udp_target_selection")) {
                    builder.append(prot.getName() + "_forwardMessage(forward_buf, " + i + ", " + paramIP + ", " + paramPort + ");\n");
                } else {
                    builder.append(prot.getName() + "_forwardMessage(forward_buf, " + i + ");\n");
                }
                builder.append("}\n\n");
            }
        }

        /**
         * Instantiates the PosixUDPPlugin C/H templates for this protocol,
         * fills in all placeholders (ports, addresses, parser, forwarders)
         * and registers the result with the compiler context. No-op when the
         * protocol has no external connectors.
         */
        void generateNetworkLibrary(CCompilerContext ctx, Configuration cfg) {
            if (!ecos.isEmpty()) {
                String ctemplate = ctx.getTemplateByID("templates/PosixUDPPlugin.c");
                String htemplate = ctx.getTemplateByID("templates/PosixUDPPlugin.h");
                String portName = protocol.getName();
                //Threaded listener --- BEGIN
                ctx.addToInitCode("\n" + portName + "_instance.listener_id = add_instance(&" + portName + "_instance);\n");
                StringBuilder initThread = new StringBuilder();
                initThread.append("//" + portName + ":\n");
                initThread.append(portName + "_setup();\n");
                initThread.append("pthread_t thread_");
                initThread.append(portName);
                initThread.append(";\n");
                initThread.append("pthread_create( &thread_");
                initThread.append(portName);
                initThread.append(", NULL, ");
                initThread.append(portName + "_start_receiver_process");
                initThread.append(", NULL);\n");
                ctx.addToInitCode(initThread.toString());
                //Threaded listener --- END
                if(AnnotatedElementHelper.hasAnnotation(protocol, "udp_target_selection")) {
                    // destination is chosen per call: remember the parameter
                    // names and configure the remote endpoint in the forwarder
                    if (AnnotatedElementHelper.hasAnnotation(protocol, "udp_param_ip")) {
                        paramIP = AnnotatedElementHelper.annotation(protocol, "udp_param_ip").iterator().next();
                    }
                    if (AnnotatedElementHelper.hasAnnotation(protocol, "udp_param_port")) {
                        paramPort = AnnotatedElementHelper.annotation(protocol, "udp_param_port").iterator().next();
                    }
                    String remoteCfgForward = "    memset((char *) &/*PORT_NAME*/_si_remote, 0, sizeof(/*PORT_NAME*/_si_remote));\n" +
                            "\n" +
                            "    /*PORT_NAME*/_si_remote.sin_family = AF_INET;\n" +
                            "    /*PORT_NAME*/_si_remote.sin_port = htons(port);\n" +
                            "    /*PORT_NAME*/_si_remote.sin_addr = addr_from_uint32(ip);\n";
                    ctemplate = ctemplate.replace("/*REMOTE_CFG_SETUP*/", "");
                    ctemplate = ctemplate.replace("/*REMOTE_PARAM*/", ", uint32_t ip, uint16_t port");
                    ctemplate = ctemplate.replace("/*REMOTE_CFG_FORWARD*/", remoteCfgForward);
                } else {
                    // fixed destination: configure the remote endpoint once at setup
                    String remoteCfgSetup = "    memset((char *) &/*PORT_NAME*/_si_remote, 0, sizeof(/*PORT_NAME*/_si_remote));\n" +
                            "\n" +
                            "    /*PORT_NAME*/_si_remote.sin_family = AF_INET;\n" +
                            "    /*PORT_NAME*/_si_remote.sin_port = htons(/*PORT_NAME*/_REMOTE_PORT);\n" +
                            "    if (inet_aton(/*PORT_NAME*/_REMOTE_ADDR, &(/*PORT_NAME*/_si_remote.sin_addr)) == 0) {\n" +
                            "        printf(\"Failed copying src address\\n\");\n" +
                            "    }";
                    ctemplate = ctemplate.replace("/*REMOTE_CFG_SETUP*/", remoteCfgSetup);
                    ctemplate = ctemplate.replace("/*REMOTE_PARAM*/", "");
                    ctemplate = ctemplate.replace("/*REMOTE_CFG_FORWARD*/", "");
                }
                ctemplate = ctemplate.replace("/*PORT_NAME*/", portName);
                htemplate = htemplate.replace("/*PORT_NAME*/", portName);
                String address;
                // NOTE(review): without @udp_address the /*REMOTE_ADDR*/
                // placeholder is left in the template — confirm the template
                // has a usable default in that case.
                if (AnnotatedElementHelper.hasAnnotation(protocol, "udp_address")) {
                    address = AnnotatedElementHelper.annotation(protocol, "udp_address").iterator().next();
                    ctemplate = ctemplate.replace("/*REMOTE_ADDR*/", address);
                }
                Integer localPort;
                if (AnnotatedElementHelper.hasAnnotation(protocol, "udp_local_port")) {
                    localPort = Integer.parseInt(AnnotatedElementHelper.annotation(protocol, "udp_local_port").iterator().next());
                } else {
                    localPort = 10000;
                }
                ctemplate = ctemplate.replace("/*LOCAL_PORT*/", localPort.toString());
                Integer remotePort;
                if (AnnotatedElementHelper.hasAnnotation(protocol, "udp_remote_port")) {
                    remotePort = Integer.parseInt(AnnotatedElementHelper.annotation(protocol, "udp_remote_port").iterator().next());
                } else {
                    remotePort = 10000;
                }
                ctemplate = ctemplate.replace("/*REMOTE_PORT*/", remotePort.toString());
                //Parser
                for (ThingPortMessage tpm : getMessagesReceived(cfg, protocol)) {
                    Message m = tpm.m;
                    messages.add(m);
                }
                StringBuilder ParserImplementation = new StringBuilder();
                ParserImplementation.append("void " + portName + "_parser(byte * msg, uint16_t size");
                if(AnnotatedElementHelper.hasAnnotation(protocol, "udp_target_selection")) {
                    ParserImplementation.append(", uint32_t provided_" + paramIP + ", uint16_t provided_" + paramPort);
                }
                ParserImplementation.append(") {\n");
                sp.generateParserBody(ParserImplementation, "msg", "size", messages, portName + "_instance.listener_id");
                ParserImplementation.append("}\n");
                // single substitution: sub-functions + parser body replace the
                // placeholder (a second replace of the same placeholder was a
                // dead no-op and has been removed)
                ctemplate = ctemplate.replace("/*PARSER_IMPLEMENTATION*/", sp.generateSubFunctions() + ParserImplementation);
                String ParserCall = portName + "_parser(buf, recv_len";
                if(AnnotatedElementHelper.hasAnnotation(protocol, "udp_target_selection")) {
                    ParserCall += ", " + portName + "_si_rcv.sin_addr.s_addr, ntohs(" + portName + "_si_rcv.sin_port)";
                }
                ParserCall += ");";
                ctemplate = ctemplate.replace("/*PARSER_CALL*/", ParserCall);
                //End parser
                htemplate = htemplate.replace("/*PATH_TO_C*/", protocol.getName() + ".c");
                StringBuilder b = new StringBuilder();
                StringBuilder h = new StringBuilder();
                generateMessageForwarders(b, h, cfg, protocol);
                ctemplate += "\n" + b;
                htemplate += "\n" + h;
                ctx.getBuilder(protocol.getName() + ".c").append(ctemplate);
                ctx.getBuilder(protocol.getName() + ".h").append(htemplate);
                ctx.addToIncludes("#include \"" + protocol.getName() + ".h\"");
            }
        }
    }
}
| |
package org.mitre.synthea.engine;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.gson.stream.JsonReader;

import java.io.File;
import java.io.FileReader;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Matcher;
import java.util.stream.Stream;

import org.mitre.synthea.modules.CardiovascularDiseaseModule;
import org.mitre.synthea.modules.EncounterModule;
import org.mitre.synthea.modules.HealthInsuranceModule;
import org.mitre.synthea.modules.LifecycleModule;
import org.mitre.synthea.modules.QualityOfLifeModule;
import org.mitre.synthea.world.agents.Person;
/**
* Module represents the entry point of a generic module.
*
* <p>The `modules` map is the static list of generic modules. It is loaded once per process,
* and the list of modules is shared between the generated population. Because we share modules
* across the population, it is important that States are cloned before they are executed.
* This keeps the "master" copy of the module clean.
*/
public class Module {

  /**
   * Master registry of modules, keyed by relative module path (e.g.
   * "appendicitis" or "medications/otc_antihistamine"). Loaded once per
   * process and shared by the whole generated population; because modules
   * are shared, States must be cloned before execution so the "master"
   * copy stays clean.
   */
  private static final Map<String, Module> modules = loadModules();

  /**
   * Loads the built-in hardcoded modules plus every readable ".json" generic
   * module found under the "modules" folder on the classpath.
   *
   * @return map of module key (relative path without ".json") to Module
   */
  private static Map<String, Module> loadModules() {
    Map<String, Module> retVal = new ConcurrentHashMap<String, Module>();

    retVal.put("Lifecycle", new LifecycleModule());
    retVal.put("Cardiovascular Disease", new CardiovascularDiseaseModule());
    retVal.put("Quality Of Life", new QualityOfLifeModule());
    retVal.put("Health Insurance", new HealthInsuranceModule());

    try {
      URL modulesFolder = ClassLoader.getSystemClassLoader().getResource("modules");
      Path path = Paths.get(modulesFolder.toURI());
      // try-with-resources: Files.walk returns a stream backed by open
      // directory handles, which must be closed when traversal finishes.
      try (Stream<Path> walker = Files.walk(path, Integer.MAX_VALUE)) {
        walker.filter(Files::isReadable).filter(Files::isRegularFile)
            .filter(p -> p.toString().endsWith(".json"))
            .forEach(t -> {
              try {
                Module module = loadFile(t, path);
                String relativePath = relativePath(t, path);
                retVal.put(relativePath, module);
              } catch (Exception e) {
                // A single malformed module file aborts the walk; surface
                // the original cause to the enclosing catch.
                e.printStackTrace();
                throw new RuntimeException(e);
              }
            });
      }
    } catch (Exception e) {
      // Best-effort bootstrap: report the failure and continue with
      // whatever modules were loaded so far.
      e.printStackTrace();
    }

    System.out.format("Loaded %d modules.\n", retVal.size());
    return retVal;
  }

  /**
   * Converts an absolute module file path into its registry key: the path
   * relative to the modules folder, without the ".json" extension, using
   * forward slashes regardless of platform.
   *
   * <p>Implemented with plain string operations instead of the previous
   * regex-based replaceFirst calls, which (a) used Matcher.quoteReplacement
   * — which escapes replacement strings, not patterns — leaving regex
   * metacharacters in the folder path unquoted, and (b) used the pattern
   * ".json" where "." matches any character.
   */
  private static String relativePath(Path filePath, Path modulesFolder) {
    String relative = modulesFolder.relativize(filePath).toString();
    if (relative.endsWith(".json")) {
      relative = relative.substring(0, relative.length() - ".json".length());
    }
    return relative.replace(File.separator, "/");
  }

  /**
   * Loads a single module from a JSON file.
   *
   * @param path the JSON file to load
   * @param modulesFolder the root modules folder; any file whose parent is
   *        not this folder is treated as a submodule
   * @return the parsed Module
   * @throws Exception if the file cannot be read or parsed
   */
  public static Module loadFile(Path path, Path modulesFolder) throws Exception {
    System.out.format("Loading %s\n", path.toString());
    boolean submodule = !path.getParent().equals(modulesFolder);
    // try-with-resources so the reader is closed even when parsing throws
    // (the previous version leaked both readers on failure). Reading via
    // Files.newBufferedReader also fixes the charset to UTF-8 instead of
    // the platform default used by FileReader.
    try (JsonReader reader = new JsonReader(Files.newBufferedReader(path))) {
      JsonObject object = new JsonParser().parse(reader).getAsJsonObject();
      return new Module(object, submodule);
    }
  }

  /** @return the keys of every loaded module, including submodules. */
  public static String[] getModuleNames() {
    return modules.keySet().toArray(new String[modules.size()]);
  }

  /**
   * @return a list of top-level modules. Submodules are not included.
   */
  public static List<Module> getModules() {
    List<Module> list = new ArrayList<Module>();
    modules.forEach((k, v) -> {
      if (!v.submodule) {
        list.add(v);
      }
    });
    return list;
  }

  /**
   * @param path
   *          : the relative path of the module, without the root or ".json" file extension. For
   *          example, "medications/otc_antihistamine" or "appendicitis".
   * @return module : the given module, or null if no module has that path
   */
  public static Module getModuleByPath(String path) {
    return modules.get(path);
  }

  // Display name, e.g. "Appendicitis Module".
  public String name;
  // True when this module lives in a subfolder of the modules directory.
  public boolean submodule;
  // Free-text remarks copied from the module definition.
  public List<String> remarks;
  // State name -> master State instance; states are cloned before running.
  private Map<String, State> states;

  protected Module() {
    // no-args constructor only allowed to be used by subclasses
  }

  /**
   * Builds a Module from its GMF JSON definition.
   *
   * @param definition parsed JSON module definition; must contain "name" and "states"
   * @param submodule whether this module is a submodule
   * @throws Exception if any state definition cannot be built
   */
  public Module(JsonObject definition, boolean submodule) throws Exception {
    name = String.format("%s Module", definition.get("name").getAsString());
    this.submodule = submodule;
    remarks = new ArrayList<String>();
    if (definition.has("remarks")) {
      JsonElement jsonRemarks = definition.get("remarks");
      for (JsonElement value : jsonRemarks.getAsJsonArray()) {
        remarks.add(value.getAsString());
      }
    }

    JsonObject jsonStates = definition.get("states").getAsJsonObject();
    states = new ConcurrentHashMap<String, State>();
    for (Entry<String, JsonElement> entry : jsonStates.entrySet()) {
      State state = State.build(this, entry.getKey(), entry.getValue().getAsJsonObject());
      states.put(entry.getKey(), state);
    }
  }

  /**
   * Process this Module with the given Person at the specified time within the simulation.
   *
   * @param person
   *          : the person being simulated
   * @param time
   *          : the date within the simulated world
   * @return completed : whether or not this Module completed.
   */
  @SuppressWarnings("unchecked")
  public boolean process(Person person, long time) {
    person.history = null;
    // what current state is this person in?
    if (!person.attributes.containsKey(this.name)) {
      person.history = new LinkedList<State>();
      person.history.add(initialState());
      person.attributes.put(this.name, person.history);
    }
    person.history = (List<State>) person.attributes.get(this.name);
    String activeKey = EncounterModule.ACTIVE_WELLNESS_ENCOUNTER + " " + this.name;
    if (person.attributes.containsKey(EncounterModule.ACTIVE_WELLNESS_ENCOUNTER)) {
      person.attributes.put(activeKey, true);
    }
    State current = person.history.get(0);
    // process the current state, looping until the module is finished —
    // probably more than one state per call.
    String nextStateName = null;
    while (current.run(person, time)) {
      Long exited = current.exited;
      nextStateName = current.transition(person, time);
      // clone the state so we don't dirty the shared master copy
      current = states.get(nextStateName).clone();
      person.history.add(0, current);
      if (exited != null && exited < time) {
        // This must be a delay state that expired between cycles, so temporarily rewind time
        process(person, exited);
        current = person.history.get(0);
      }
    }
    person.attributes.remove(activeKey);
    return (current instanceof State.Terminal);
  }

  private State initialState() {
    return states.get("Initial"); // all Initial states have name Initial
  }

  /** @return the master copy of the named state, or null if absent. */
  public State getState(String name) {
    return states.get(name);
  }

  /**
   * Get a collection of the names of all the states this Module contains.
   *
   * @return set of all state names, or empty set if this is a non-GMF module
   */
  public Collection<String> getStateNames() {
    if (states == null) {
      // ex, if this is a non-GMF module
      return Collections.emptySet();
    }
    return states.keySet();
  }
}
| |
package com.google.sps.servlets;
import com.google.gson.reflect.TypeToken;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.Random;
import java.util.TimeZone;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpPatch;
import org.apache.http.entity.AbstractHttpEntity;
import org.apache.http.entity.StringEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import org.joda.time.LocalDate;
import org.joda.time.format.DateTimeFormat;
import org.json.simple.JSONObject;
import org.json.simple.JSONArray;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
// Gson
import com.google.gson.*;
// Date and Time
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Calendar;
import java.util.Date;
import java.time.ZoneId;
import org.joda.time.Interval;
import org.joda.time.DateTimeZone;
import org.joda.time.DateTime;
@WebServlet("/schedule-generator")
public class ScheduleGenerationServlet extends HttpServlet {
private static HTTP http = new HTTP();
private Time TIME = new Time();
private Datastore DB = new Datastore();
private Gson g = new Gson();
private int successfulRecurringEventsCreated = 0;
// Primary ID used to identify user on databsse
private String primary_id = "";
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
// reset successfulEventsCreated
successfulRecurringEventsCreated = 0;
// Grab access token and make sure it's valid! REQUIRED!
String accessToken = (String) request.getSession(false).getAttribute("access_token");
if (!http.isAccessTokenValid(accessToken)) {
response.sendRedirect("/request-permission");
return;
}
// Check to make sure the user has a settings page ready to go to read from in the database!
GetCalendar getCalendar = new GetCalendar();
String json = new HTTP().get(getCalendar.createGetCalendarURL("primary", accessToken));
JSONObject jsonObject = new HTTP().parseJSON(json);
this.primary_id = (String) jsonObject.get("id");
if (DB.getUser(primary_id) == null) {
response.sendRedirect("/request-permission");
return;
}
String userEventsJSON = DB.getUserSetting(this.primary_id, "userEventsChoice");
int userEventsChoice = g.fromJson(userEventsJSON, Integer.class);
GenerateSchedule(request);
if (this.successfulRecurringEventsCreated < userEventsChoice) {
GenerateSchedule(request);
}
if (this.successfulRecurringEventsCreated < userEventsChoice) {
response.setContentType("application/json");
JSONObject sendJSON = new JSONObject();
sendJSON.put("error", "The Schedule Generator was not able to schedule maximum events." +
"1) Add more times and durations 2) Add more days or a bigger span to attempt to add recurring events! 3) Or free up your calendar busy bee! ;)");
response.getWriter().print(sendJSON);
return;
}
response.sendRedirect("/home.html");
}
// ---------------- Functions For Google Calendar API Access ----------------- //
// Creates a new calendar for the user
JSONObject createNewCalendar(DefaultHttpClient httpClient, String accessToken, String summary) {
NewCalendar calendar = new NewCalendar(summary);
HttpPost postRequest = new HttpPost(calendar.createNewCalendarURL(accessToken));
Gson gson = new Gson();
String json = gson.toJson(calendar);
return http.postWithData(httpClient, postRequest, json);
}
// Get an Event Resources
JSONObject getEvent(DefaultHttpClient httpClient, String accessToken, String calendar_id, String eventId) {
GetEvent eventObj = new GetEvent();
String json = "";
try {
json = http.get(eventObj.createGetEventURL(calendar_id, eventId, accessToken, Time.timezone));
} catch (Exception e) {
System.out.println("There was an error getting the event resource." + e);
}
return http.parseJSON(json);
}
// Delete event
void deleteEvent(String accessToken, String calendar_id, String eventId) {
DeleteEvent eventObj = new DeleteEvent();
String json = "";
try {
json = http.delete(eventObj.createDeleteEventURL(calendar_id, eventId, accessToken));
} catch (Exception e) {
System.out.println("There was an error deleting the event. " + e);
}
}
// Creates a new calendar event on the calendar_id specified.
void createNewEvent(DefaultHttpClient httpClient, String accessToken, String id, DateTime startTime, DateTime endTime,
String summary, String description, int recurrenceLengthInWeeks) {
// 1) Set start date and end date for recurrence
int dayOfWeek = startTime.getDayOfWeek();
// 2) Add event fields: Summary, Description, StartTime, EndTime, Recurrence
NewEvent event = new NewEvent(summary, description);
// Get zone ID since Java must be told the time zone.
ZoneId z = ZoneId.of( TIME.timezone ) ;
DateTimeFormatter fmt = DateTimeFormatter
.ofPattern("yyyy-MM-dd'T'HH:mm:ssXXX")
.withZone(z);
// Set event START TIME and END TIME
event.setStart(fmt.format(startTime.toDate().toInstant()), TIME.timezone);
event.setEnd(fmt.format(endTime.toDate().toInstant()), TIME.timezone);
// Add a recurrence to this event that: repeats on THE DAYS GIVEN until X week(s) from now
event.addRecurrence(TIME.createWeekRecurrence(TIME.onDays(dayOfWeek), TIME.addWeeks(startTime.toDate(), recurrenceLengthInWeeks)));
// 3) Make request
Gson gson = new Gson();
String json = gson.toJson(event);
HttpPost postRequest = new HttpPost(event.createNewEventURL(id, accessToken));
http.postWithData(httpClient, postRequest, json);
}
// Updates calendar id event
JSONObject updateEvent(DefaultHttpClient httpClient, String accessToken, String calendar_id, String id, String description) {
PatchEvent calendar = new PatchEvent(description);
HttpPatch patchRequest = new HttpPatch(calendar.createPatchEventURL(calendar_id, id, accessToken));
Gson gson = new Gson();
String json = gson.toJson(calendar);
return http.patchWithData(httpClient, patchRequest, json);
}
// Returns a setting on the user's Google Calendar
JSONObject getSetting(String accessToken, String setting) {
GetSetting getSetting = new GetSetting();
String json = "";
try {
json = http.get(getSetting.createGetSettingURL(setting, accessToken));
} catch (Exception e) {
System.out.println("There was an error getting the setting: " + e);
}
return http.parseJSON(json);
}
// Returns the freeBusy information for a specific timeMin and timeMax
JSONObject getFreeBusy(DefaultHttpClient httpClient, String accessToken, String timeMin, String timeMax, String timeZone, List<String> ids) {
GetFreeBusy busy = new GetFreeBusy(timeMin, timeMax, timeZone);
for (String id : ids) {
busy.addId(id);
}
busy.updateCalendarExpansionTo(ids.size());
HttpPost postRequest = new HttpPost(busy.createGetFreeBusyURL(accessToken));
Gson gson = new Gson();
String json = gson.toJson(busy);
return http.postWithData(httpClient, postRequest, json);
}
// Returns a list of calendars that the user has on their calendar
JSONObject getCalendarsList(DefaultHttpClient httpClient, String accessToken) {
ListCalendars list = new ListCalendars();
String json = "";
try {
json = http.get(list.createListCalendarsURL(accessToken));
} catch (Exception e) {
System.out.println("There was an error getting the list of calendars." + e);
}
return http.parseJSON(json);
}
// Returns a list of events on a user's calendar
JSONObject getListEvents(DefaultHttpClient httpClient, String accessToken, String calendar_id) {
ListEvents list = new ListEvents();
String json = "";
try {
json = http.get(list.createListEventsURL(calendar_id, accessToken));
} catch (Exception e) {
System.out.println("There was an error getting the list of events for a user's calendar." + e);
}
return http.parseJSON(json);
}
// Returns a list of instances for a recurring event
JSONObject getEventRecurrenceInstances(DefaultHttpClient httpClient, String accessToken, String calendar_id, String recurring_event_id) {
GetEventInstance instances = new GetEventInstance();
String json = "";
try {
json = http.get(instances.createGetEventInstanceURL(calendar_id, recurring_event_id, accessToken));
} catch (Exception e) {
System.out.println("There was an error getting the list of instances for a recurring event. " + e);
}
return http.parseJSON(json);
}
// This function will be used to get all the calendar ID's
public List<String> getAllCalendarsAttribute(DefaultHttpClient httpClient, String accessToken, String attr) {
JSONObject userCalendars = getCalendarsList(httpClient, accessToken);
JSONArray items = (JSONArray) userCalendars.get("items");
Iterator iter = items.iterator();
List<String> listAttr = new ArrayList<String>();
while (iter.hasNext()) {
JSONObject calendarResource = (JSONObject) iter.next();
String calenderAttr = (String) calendarResource.get(attr);
listAttr.add(calenderAttr);
}
return listAttr;
}
// ----------------- // Utility Functions // --------------------- //
/** This is a pure function meaning that it will generate the same schedule if the same settings are present.
*
* In a nutshell, we simply try to evenly place recurring events within our SPAN.
* For example our SPAN == 7 and the today is Monday assuming Default Settings and FreeTime every day.
* X X X
* M T W T F S S
*
* stepValue: Is the number of days we skip. Currently, within the span we skip span/3 days to start.
* Example: Our span is 7. 7/3 = 2. We skip every other day.
*
*
* The interesting case is when we land on a INVALID day.
* When this happens, we simply move our countofDaysFromNow + 1 + stepValue.
* All this means, is we go to 1 week from tomorrow and try that day. Increasing our stepValue by 1.
*
* @param request To get access to resources, access token, and more.
* @return none we simply generate the schedule.
*/
private void GenerateSchedule(HttpServletRequest request) {
String accessToken = (String) request.getSession(false).getAttribute("access_token");
String searchKeyword = (String) request.getSession(false).getAttribute("searchKeyword");
DefaultHttpClient httpClient = new DefaultHttpClient();
String id = "";
// Check calendar not already made
if (studyScheduleNotMade(httpClient, accessToken, "summary")) {
JSONObject calendar = createNewCalendar(httpClient, accessToken, "Study Schedule");
id = (String) calendar.get("id");
request.getSession().setAttribute("study-schedule-id", id);
}
id = (String) request.getSession(false).getAttribute("study-schedule-id");
// Grab timezone to be used with creation of event.
JSONObject timeZoneSetting = getSetting(accessToken, "timezone");
String timezone = (String) timeZoneSetting.get("value");
TIME.setTimeZoneId(timezone);
// Calendars we take into account for FreeBusy information here
List<String> allCalendarIds = getAllCalendarsAttribute(httpClient, accessToken, "id");
List<String> ids = new ArrayList<String>();
// Remove bad calendar ids. The calendars that contain '#', are not actual calendars that have events.
for (String calendar_id : allCalendarIds) {
if (!calendar_id.contains("#")) {
ids.add(calendar_id);
}
}
// Let's try to create events! We depend on RECURRING EVENTS. So, upto the next week, we try to create
// To evenly space events within range.
String spanGSON = DB.getUserSetting(this.primary_id, "eventLookSpan");
int span = g.fromJson(spanGSON, Integer.class);
int stepValue = span / 3;
int countOfDaysFromNow = 0;
List<Integer> eventAlreadyScheduled = new ArrayList<Integer>();
// Get study schedule intensity from database
String userEventsJSON = DB.getUserSetting(this.primary_id, "userEventsChoice");
int userEventsChoice = g.fromJson(userEventsJSON, Integer.class);
// Get event look span from database
String eventLookJSON = DB.getUserSetting(this.primary_id, "eventLookSpan");
int eventLookSpan = g.fromJson(eventLookJSON, Integer.class);
// Get days event could be on
String onDaysJSON = DB.getUserSetting(this.primary_id, "onDays");
List<Integer> possibleDays = g.fromJson(onDaysJSON, new TypeToken<List<Integer>>(){}.getType());
// Get event recurrence length
String eventRecurrenceLengthJSON = DB.getUserSetting(this.primary_id, "eventRecurrenceLength");
int eventRecurrenceLength = g.fromJson(eventRecurrenceLengthJSON, Integer.class);
for (int i = 0; i < span; i++) {
// If we created max number of events
if (this.successfulRecurringEventsCreated == userEventsChoice) break;
if (new DateTime().plusDays(countOfDaysFromNow).isAfter(new DateTime().plusWeeks(eventLookSpan))) break;
Boolean isvalid = checkDateIsValid(possibleDays, getCurrentDay(countOfDaysFromNow));
// Check that day is valid
if (!isvalid) {
span += 7; // Increase our possible span.
//countOfDaysFromNow += stepValue; // To try the next day, 1 week in advance.
countOfDaysFromNow++;
stepValue++; // Increase step value.
continue;
}
// If day is greater than or equal to span
if (countOfDaysFromNow >= span) {
countOfDaysFromNow = 0;
// Move day to next day that was not already scheduled
for (Integer event : eventAlreadyScheduled) {
if (event == countOfDaysFromNow) countOfDaysFromNow++;
else break;
}
// Decrease step value because we stepValue exceeded our span.
--stepValue;
}
List<DateTime> times = getStartInformationForPossibleEvent(httpClient, accessToken, countOfDaysFromNow , timezone, ids);
// If we have times to schedule, schedule those times
if (!times.isEmpty() && isvalid) {
try {
createNewEvent(httpClient, accessToken, id, times.get(0), times.get(1), "Study Session - " + searchKeyword, "", eventRecurrenceLength);
++this.successfulRecurringEventsCreated;
} catch (Exception e) {
System.out.println("There was an error trying to create a new event" + e);
}
// Add day
eventAlreadyScheduled.add(countOfDaysFromNow);
}
// Move to next day
countOfDaysFromNow += stepValue;
}
String resourcesGSON = DB.getUserSetting(this.primary_id, "resources");
List<String> resources = g.fromJson(resourcesGSON, List.class);
editEventInstance(httpClient, accessToken, id, resources, ids);
}
// Current day of the week
public int getCurrentDay(final int day) {
// Get start week
String startWeekJSON = DB.getUserSetting(this.primary_id, "startWeek");
int startWeek = g.fromJson(startWeekJSON, Integer.class);
// Get start Day
String startDayJSON = DB.getUserSetting(this.primary_id, "startDay");
int startDay = g.fromJson(startDayJSON, Integer.class);
DateTime currentDateTime = new DateTime().plusWeeks(startWeek).plusDays(startDay + day);
return currentDateTime.getDayOfWeek();
}
// Check that a given day is a valid day to set an event based on user settings
public Boolean checkDateIsValid(List<Integer> validDays, int currentDay) {
if (validDays.isEmpty()) return true;
for (Integer possibleDay : validDays) {
if (possibleDay == currentDay) return true;
}
return false;
}
// This function gives us the Start Time and End Time which we directly pass to createNewEvent! So let's say we're given a day. We get the freeBusy information for user on that day.
// Then for each study session duration simply loop through each study session start time and see if range is valid.
public List<DateTime> getStartInformationForPossibleEvent(DefaultHttpClient httpClient, String accessToken, Integer currentDay, String timeZone, List<String> ids ) {
// Get start week
String startWeekJSON = DB.getUserSetting(this.primary_id, "startWeek");
int startWeek = g.fromJson(startWeekJSON, Integer.class);
// Get start Day
String startDayJSON = DB.getUserSetting(this.primary_id, "startDay");
int startDay = g.fromJson(startDayJSON, Integer.class);
// Get length of current date
String dayStart = TIME.setTime(0 + startWeek, currentDay + startDay, 0, 0);
String dayEnd = TIME.setTime(0 + startWeek, currentDay + startDay, 23, 59);
JSONObject jsonObject = getFreeBusy(httpClient, accessToken, dayStart, dayEnd, timeZone, ids);
// Go through nested response
JSONObject calendar = (JSONObject) jsonObject.get("calendars");
JSONObject obj = (JSONObject) calendar.get(ids.get(0));
JSONArray array = (JSONArray) obj.get("busy"); // array cannot be null so we must initialize it with primary id
for (String id : ids) {
if (id == ids.get(0)) continue; // So we don't add our primary ID twice!
try {
JSONObject cal = (JSONObject) calendar.get(id);
JSONArray busyTime = (JSONArray) cal.get("busy");
array.addAll(busyTime);
} catch (Exception e) {
System.out.println("An exception occurred when trying to get busytime!" + e);
continue;
}
}
// We simply loop through each duration and then loop through each starting time and try to schedule that event on that day!
DateTime timeToTry = new DateTime().withZone(DateTimeZone.forID(Time.timezone)).plusWeeks(startWeek).plusDays(currentDay + startDay);
DateTime timeToTryEnd = null;
List<DateTime> listOfValidTimes = new ArrayList<DateTime>();
// Get start times
String startTimesJSON = DB.getUserSetting(this.primary_id, "start");
List<List<Integer>> starttimes = g.fromJson(startTimesJSON, new TypeToken<List<List<Integer>>>(){}.getType());
// Get max duration
String durationsJSON = DB.getUserSetting(this.primary_id, "length");
List<List<Integer>> durations = g.fromJson(durationsJSON, new TypeToken<List<List<Integer>>>(){}.getType());
// Start with the max duration
for (List<Integer> duration : durations) {
// Loop through specific study session start times and see if one works.
for (List<Integer> time : starttimes) {
int rnd = new Random().nextInt(starttimes.size());
List<Integer> possible = starttimes.get(rnd);
timeToTry = timeToTry.withZone(DateTimeZone.forID(Time.timezone)).withHourOfDay(possible.get(0)).withMinuteOfHour(possible.get(1)).withSecondOfMinute(0);
// Check that we have not already gone past the time we are trying to schedule!
if (new DateTime().isAfter(timeToTry)) continue;
// Start with the max duration
Boolean foundOverlap = false;
// Get duration of starttime ; NOTE: we subtract 1 because overlap works by: If BUSY: 5:00pm-6:00pm ; We would not schedule: 4:00PM-5:00pm but we should.
timeToTryEnd = timeToTry.plusHours(duration.get(0)).plusMinutes(duration.get(1) - 1);
Interval studySession = new Interval(timeToTry, timeToTryEnd);
Iterator i = array.iterator();
// Loop through busy periods and check if our duration could fit at any point.
while (i.hasNext()) {
// Grab each start and end time.
JSONObject busyTimePeriod = (JSONObject) i.next();
String busyStartTime = (String) busyTimePeriod.get("start");
String busyEndTime = (String) busyTimePeriod.get("end");
DateTime startBusy = DateTime.parse(busyStartTime).withZone(DateTimeZone.forID(Time.timezone));
DateTime endBusy = DateTime.parse(busyEndTime).withZone(DateTimeZone.forID(Time.timezone));
// Create an interval
Interval busyInterval = new Interval(startBusy, endBusy);
// If study session ever overlaps with any of the busy overlaps WITH ANY busy interval we can no longer set that as an event!
if (busyInterval.overlaps(studySession)) {
foundOverlap = true;
break;
}
}
// Study session with duraion FOUND. FUTURE: We could just add to the list here
if (!foundOverlap) {
// TODO(paytondennis@) Check ratio here coming soon; SOON
// Check ratio of next 4 weeks. Need to call freebusy for that time
listOfValidTimes.add(timeToTry);
listOfValidTimes.add(timeToTryEnd.plusMinutes(1)); // add 1 minute back, because look at above comment!
return listOfValidTimes;
}
}
}
return listOfValidTimes;
}
// Puts resources in each recurring event instance and tries to fix each event.
public void editEventInstance(DefaultHttpClient httpClient, String accessToken, String calendar_id, List<String> resources, List<String> ids) {
ids.remove(calendar_id);
// Get all recurring event IDS.
JSONObject obj = getListEvents(httpClient, accessToken, calendar_id);
JSONArray items = (JSONArray) obj.get("items");
Iterator i = items.iterator();
// Calculate event recurrence
String eventRecurrenceLengthJSON = DB.getUserSetting(this.primary_id, "eventRecurrenceLength");
Integer eventRecurrenceLength = g.fromJson(eventRecurrenceLengthJSON, Integer.class);
// Get study intensity
String userEventsChoiceJSON = DB.getUserSetting(this.primary_id, "userEventsChoice");
Integer userEventsChoice = g.fromJson(userEventsChoiceJSON, Integer.class);
int resourceCount = 0;
while (i.hasNext()) {
if (resourceCount == eventRecurrenceLength * userEventsChoice) break;
// Create current IDS in this recurrence
List<String> currentIdsInRecurrence = new ArrayList<String>();
JSONObject event = (JSONObject) i.next();
String recurring_eid = (String) event.get("id");
// For this id, loop through each instance
JSONObject instances = getEventRecurrenceInstances(httpClient, accessToken, calendar_id, recurring_eid);
JSONArray recurringEvents = (JSONArray) instances.get("items");
Iterator j = recurringEvents.iterator();
int counter = 0;
JSONObject beginningEvent = null;
JSONObject endEvent = null;
while (j.hasNext()) {
++resourceCount;
JSONObject specificInstance = (JSONObject) j.next();
String specific_recurring_eid = (String) specificInstance.get("id");
currentIdsInRecurrence.add(specific_recurring_eid);
if (counter == 0) beginningEvent = specificInstance;
endEvent = specificInstance;
putResourcesInEvents(resources, resourceCount, httpClient, accessToken, calendar_id, specific_recurring_eid);
++counter;
if (counter == eventRecurrenceLength) break;
}
fixOverlappingEvents(httpClient, accessToken, calendar_id, beginningEvent, endEvent, ids, currentIdsInRecurrence);
}
}
public void putResourcesInEvents(List<String> resources, int resourceCount, DefaultHttpClient httpClient, String accessToken, String calendar_id, String specific_recurring_eid) {
String res = "";
String description = DB.getUserSetting(this.primary_id, "description");
try {
res = "Resource: " + resources.get(resourceCount) + " \n----\n" + description;
} catch (Exception e) {
res = description;
}
// Now make updates to this event as necessary
try {
updateEvent(httpClient, accessToken, calendar_id, specific_recurring_eid, res);
} catch (Exception e) {
System.out.println(e);
}
}
public void fixOverlappingEvents(DefaultHttpClient httpClient, String accessToken, String calendar_id, JSONObject beginningEvent, JSONObject endEvent,
List<String> ids, List<String> currentIdsInRecurrence) {
// Get Free Busy information for span of recurring event
// Get range for FreeBusy information
JSONObject eventResourceStart = null;
JSONObject eventResourceEnd = null;
if (beginningEvent == null || endEvent == null) return;
String freeBusyStartID = (String) beginningEvent.get("id");
String freeBusyEndID = (String) endEvent.get("id");
try {
eventResourceStart = getEvent(httpClient, accessToken, calendar_id, freeBusyStartID);
eventResourceEnd = getEvent(httpClient, accessToken, calendar_id, freeBusyEndID);
} catch (Exception e) {
System.out.println(e);
return;
}
JSONObject startOBJ = (JSONObject) eventResourceStart.get("start");
JSONObject endOBJ = (JSONObject) eventResourceEnd.get("end");
String startTime = (String) startOBJ.get("dateTime");
String endTime = (String) endOBJ.get("dateTime");
DateTime startEventFreeBusyTime = DateTime.parse(startTime);
DateTime endEventFreeBusyTime = DateTime.parse(endTime);
// Get free busy for entire span of time.
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ssXXX")
.withZone(Time.timeZoneId);
String s = formatter.ISO_OFFSET_DATE_TIME.withZone(Time.timeZoneId).format(startEventFreeBusyTime.toDate().toInstant());
String e = formatter.ISO_OFFSET_DATE_TIME.withZone(Time.timeZoneId).format(endEventFreeBusyTime.toDate().toInstant());
JSONObject freeBusy = null;
try {
freeBusy = getFreeBusy(httpClient, accessToken, s, e, Time.timezone, ids);
} catch (Exception err) {
System.out.println(err);
return;
}
String deleteOverlappingEventsJSON = DB.getUserSetting(this.primary_id, "deleteOverlappingEvents");
Boolean deleteOverlappingEvents = g.fromJson(deleteOverlappingEventsJSON, Boolean.class);
// Loop back through events
for (String id : currentIdsInRecurrence) {
// Perform action if overlap!
JSONObject instanceObject = getEvent(httpClient, accessToken, calendar_id, id);
JSONObject sObject = (JSONObject) instanceObject.get("start");
JSONObject eObject = (JSONObject) instanceObject.get("end");
String sTime = (String) sObject.get("dateTime");
String eTime = (String) eObject.get("dateTime");
DateTime sEventTime = DateTime.parse(sTime);
DateTime eEventTime = DateTime.parse(eTime);
if (foundOverlap(sEventTime, eEventTime, freeBusy, ids)) {
if (deleteOverlappingEvents)
deleteEvent(accessToken, calendar_id, id);
}
// Get event endtime and stop until busyfreetime is after endtime
}
}
// UTILITY function that returns true if found a overlap!
/**
 * Returns true if the candidate event [eventStart, eventEnd) overlaps any busy
 * period reported by the free/busy response for the given calendar ids.
 *
 * @param eventStart start of the candidate event
 * @param eventEnd   end of the candidate event (one minute is shaved off below)
 * @param jsonObject free/busy response; expected shape: { "calendars": { id: { "busy": [...] } } }
 * @param ids        calendar ids; ids.get(0) is the primary calendar and is assumed present
 */
Boolean foundOverlap(DateTime eventStart, DateTime eventEnd, JSONObject jsonObject, List<String> ids) {
    // Navigate the nested free/busy response.
    JSONObject calendar = (JSONObject) jsonObject.get("calendars");
    JSONObject obj = (JSONObject) calendar.get(ids.get(0));
    // Seed the merged busy list with the primary calendar's periods.
    // array cannot be null so we must initialize it with primary id.
    JSONArray array = (JSONArray) obj.get("busy");
    // Merge busy periods from every secondary calendar.
    // BUGFIX: the original skipped the primary via `id == ids.get(0)` — String
    // reference equality. Skip by position instead, which is the actual intent.
    for (int i = 1; i < ids.size(); i++) {
        try {
            JSONObject cal = (JSONObject) calendar.get(ids.get(i));
            JSONArray busyTime = (JSONArray) cal.get("busy");
            array.addAll(busyTime);
        } catch (Exception e) {
            // Best-effort: a calendar with no busy data is simply ignored.
            System.out.println("An exception occurred when trying to get busytime!" + e);
        }
    }
    // Shrink the session by one minute so back-to-back events don't count as overlapping.
    Interval studySession = new Interval(eventStart, eventEnd.plusMinutes(-1));
    Iterator i = array.iterator();
    // Loop through busy periods and check if our duration could fit at any point.
    while (i.hasNext()) {
        // Grab each start and end time.
        JSONObject busyTimePeriod = (JSONObject) i.next();
        String busyStartTime = (String) busyTimePeriod.get("start");
        String busyEndTime = (String) busyTimePeriod.get("end");
        DateTime startBusy = DateTime.parse(busyStartTime).withZone(DateTimeZone.forID(Time.timezone));
        DateTime endBusy = DateTime.parse(busyEndTime).withZone(DateTimeZone.forID(Time.timezone));
        // Create an interval for this busy period.
        Interval busyInterval = new Interval(startBusy, endBusy);
        // Any overlap with any busy interval disqualifies the candidate slot.
        if (busyInterval.overlaps(studySession) || studySession.overlaps(busyInterval)) return true;
        // NOTE(review): early exit assumes busy periods are sorted by start time — confirm
        // the free/busy API guarantees ordering across merged calendars.
        if (startBusy.isAfter(eventEnd)) break;
    }
    return false;
}
// Check that we have not already made study schedule
/**
 * Returns true when no calendar titled "Study Schedule" exists yet for this user,
 * i.e. the study schedule still needs to be created.
 */
public Boolean studyScheduleNotMade(DefaultHttpClient httpClient, String accessToken, String attr) {
    for (String calendarTitle : getAllCalendarsAttribute(httpClient, accessToken, attr)) {
        if ("Study Schedule".equals(calendarTitle)) {
            // Found an existing study schedule calendar.
            return false;
        }
    }
    return true;
}
}
| |
/*
* Copyright (C) 2009-2013 MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbson;
import org.jruby.*;
import org.jruby.runtime.builtin.*;
import org.jruby.runtime.*;
import org.jruby.runtime.callsite.*;
import org.jruby.util.*;
import org.jruby.javasupport.*;
import java.io.*;
import java.lang.*;
import java.util.*;
import java.util.regex.*;
import org.bson.*;
import org.bson.types.*;
/**
 * BSONCallback implementation that materializes decoded BSON documents as JRuby
 * objects inside a given Ruby runtime: documents become BSON::OrderedHash
 * instances, arrays become RubyArray, and BSON scalar types are wrapped in
 * their Ruby driver classes (BSON::ObjectId, BSON::Binary, BSON::Code, ...).
 *
 * <p>Not thread-safe: decode state lives in the instance-level stacks.
 */
public class RubyBSONCallback implements BSONCallback {

    // Root document of the current decode; exposed via get().
    private RubyHash _root;
    // Ruby classes resolved once from the runtime, used to construct wrapper objects.
    private RubyModule _rbclsOrderedHash;
    private RubyModule _rbclsObjectId;
    private RubyModule _rbclsBinary;
    private RubyModule _rbclsMinKey;
    private RubyModule _rbclsMaxKey;
    private RubyModule _rbclsTimestamp;
    private RubyModule _rbclsCode;
    private RubyModule _rbclsBSONRegex;
    // Optional decode options (e.g. :compile_regex), set via set_opts.
    private RubyHash _opts;
    // Container stack mirroring the nesting of the document being decoded.
    private final LinkedList<RubyObject> _stack = new LinkedList<RubyObject>();
    // Key stack parallel to _stack; tracks the name under which each container was opened.
    private final LinkedList<String> _nameStack = new LinkedList<String>();
    private Ruby _runtime;

    /**
     * Creates a callback bound to {@code runtime}, eagerly resolving the BSON
     * wrapper classes it will instantiate during decoding.
     */
    public RubyBSONCallback(Ruby runtime) {
        _runtime = runtime;
        _rbclsOrderedHash = runtime.getClassFromPath( "BSON::OrderedHash" );
        _rbclsBinary = runtime.getClassFromPath( "BSON::Binary" );
        _rbclsCode = runtime.getClassFromPath( "BSON::Code" );
        _rbclsMinKey = runtime.getClassFromPath( "BSON::MinKey" );
        _rbclsMaxKey = runtime.getClassFromPath( "BSON::MaxKey" );
        _rbclsTimestamp = runtime.getClassFromPath( "BSON::Timestamp" );
        _rbclsObjectId = runtime.getClassFromPath( "BSON::ObjectId" );
        _rbclsBSONRegex = runtime.getClassFromPath( "BSON::Regex" );
    }

    /** Returns a fresh callback bound to the same runtime. */
    public BSONCallback createBSONCallback(){
        return new RubyBSONCallback(_runtime);
    }

    /** Sets decode options consulted later (e.g. the :compile_regex flag in gotRegex). */
    public void set_opts(RubyHash opts){
        _opts = opts;
    }

    /** Clears all decode state so this callback can be reused for another document. */
    public void reset(){
        _root = null;
        _stack.clear();
        _nameStack.clear();
    }

    /** Instantiates a new BSON::OrderedHash via the Ruby runtime. */
    public RubyHash createHash() { // OrderedHash
        return (RubyHash)JavaEmbedUtils.invokeMethod(_runtime, _rbclsOrderedHash, "new",
            new Object[] { }, Object.class);
    }

    /** Instantiates an empty RubyArray. */
    public RubyArray createArray() {
        return RubyArray.newArray(_runtime);
    }

    /** Creates either an array or a hash container; {@code path} is unused here. */
    public RubyObject create( boolean array , List<String> path ){
        if ( array )
            return createArray();
        return createHash();
    }

    /**
     * Begins the top-level document. Must only be called with an empty stack;
     * the new hash becomes both the root and the current container.
     */
    public void objectStart(){
        if ( _stack.size() > 0 ) {
            throw new IllegalStateException( "Error! An illegal state ocurred." );
        }
        _root = createHash();
        _stack.add(_root);
    }

    /** The boolean flag is ignored; delegates to {@link #objectStart()}. */
    public void objectStart(boolean f) {
        objectStart();
    }

    /**
     * Begins a nested document stored under {@code key}: the new hash is written
     * into the current container, then pushed so subsequent values land inside it.
     */
    public void objectStart(String key){
        RubyHash hash = createHash();
        _nameStack.addLast( key );
        RubyObject lastObject = _stack.getLast();
        // Link the new hash into its parent, which may be a hash or an array.
        if(lastObject instanceof RubyHash) {
            writeRubyHash(key, (RubyHash)lastObject, (IRubyObject)hash);
        }
        else {
            writeRubyArray(key, (RubyArray)lastObject, (IRubyObject)hash);
        }
        _stack.addLast( (RubyObject)hash );
    }

    // Note: we use []= because we're dealing with an OrderedHash, which in 1.8
    // doesn't have an internal JRuby representation.
    public void writeRubyHash(String key, RubyHash hash, IRubyObject obj) {
        RubyString rkey = _runtime.newString(key);
        JavaEmbedUtils.invokeMethod(_runtime, hash, "[]=",
            new Object[] { (IRubyObject)rkey, obj }, Object.class);
    }

    /** Stores {@code obj} into {@code array}; BSON array keys are decimal indices. */
    public void writeRubyArray(String key, RubyArray array, IRubyObject obj) {
        Long index = Long.parseLong(key);
        array.store(index, obj);
    }

    /**
     * Begins a nested array stored under {@code key}; mirrors
     * {@link #objectStart(String)} but creates a RubyArray.
     */
    public void arrayStart(String key){
        RubyArray array = createArray();
        RubyObject lastObject = _stack.getLast();
        _nameStack.addLast( key );
        if(lastObject instanceof RubyHash) {
            writeRubyHash(key, (RubyHash)lastObject, (IRubyObject)array);
        }
        else {
            writeRubyArray(key, (RubyArray)lastObject, (IRubyObject)array);
        }
        _stack.addLast( (RubyObject)array );
    }

    /**
     * Closes the current container, popping both stacks. A non-empty object
     * stack with an empty name stack indicates unbalanced start/done calls.
     */
    public RubyObject objectDone(){
        RubyObject o =_stack.removeLast();
        if ( _nameStack.size() > 0 )
            _nameStack.removeLast();
        else if ( _stack.size() > 0 ) {
            throw new IllegalStateException( "Error! An illegal state ocurred." );
        }
        return o;
    }

    /** Top-level arrays are not valid BSON documents. */
    public void arrayStart(){
        throw new UnsupportedOperationException();
    }

    /** Arrays close exactly like objects. */
    public RubyObject arrayDone(){
        return objectDone();
    }

    /** Stores Ruby nil for a BSON null. */
    public void gotNull( String name ){
        _put(name, (RubyObject)_runtime.getNil());
    }

    /** Deprecated BSON "undefined" values are silently dropped. */
    @Deprecated
    public void gotUndefined( String name ) { }

    /** Binary subtype 3/4 UUIDs are not supported by this callback. */
    public void gotUUID( String name , long part1, long part2) {
        throw new UnsupportedOperationException();
    }

    /** Wraps JavaScript code (without scope) in BSON::Code. */
    public void gotCode( String name , String code ){
        RubyString code_string = _runtime.newString( code );
        Object rb_code_obj = JavaEmbedUtils.invokeMethod(_runtime, _rbclsCode,
            "new", new Object[] { code_string }, Object.class);
        _put( name , (RubyObject)rb_code_obj );
    }

    /** Wraps JavaScript code with its scope document in BSON::Code. */
    public void gotCodeWScope( String name , String code , Object scope ){
        RubyString code_string = _runtime.newString( code );
        Object rb_code_obj = JavaEmbedUtils.invokeMethod(_runtime, _rbclsCode,
            "new", new Object[] { code_string, (RubyHash)scope }, Object.class);
        _put( name , (RubyObject)rb_code_obj );
    }

    /** Stores a BSON::MinKey instance. */
    public void gotMinKey( String name ){
        Object minkey = JavaEmbedUtils.invokeMethod(_runtime, _rbclsMinKey, "new", new Object[] {}, Object.class);
        _put( name, (RubyObject)minkey);
    }

    /** Stores a BSON::MaxKey instance. */
    public void gotMaxKey( String name ){
        Object maxkey = JavaEmbedUtils.invokeMethod(_runtime, _rbclsMaxKey, "new", new Object[] {}, Object.class);
        _put( name, (RubyObject)maxkey);
    }

    /** Stores a Ruby true/false. */
    public void gotBoolean( String name , boolean v ){
        RubyBoolean b = RubyBoolean.newBoolean( _runtime, v );
        _put(name , b);
    }

    /** Stores a Ruby Float. */
    public void gotDouble( String name , double v ){
        RubyFloat f = new RubyFloat( _runtime, v );
        _put(name , (RubyObject)f);
    }

    /** Stores a Ruby Fixnum from a 32-bit int. */
    public void gotInt( String name , int v ){
        RubyFixnum f = new RubyFixnum( _runtime, v );
        _put(name , (RubyObject)f);
    }

    /** Stores a Ruby Fixnum from a 64-bit long. */
    public void gotLong( String name , long v ){
        RubyFixnum f = new RubyFixnum( _runtime, v );
        _put(name , (RubyObject)f);
    }

    /** Stores a Ruby Time in GMT built from epoch milliseconds. */
    public void gotDate( String name , long millis ){
        RubyTime time = RubyTime.newTime(_runtime, millis).gmtime();
        _put( name , time );
    }

    /**
     * Stores a BSON::Regex built from the pattern and flag string. Unless the
     * :compile_regex option is explicitly false, the wrapper is immediately
     * converted to a native Ruby Regexp via try_compile.
     */
    public void gotRegex( String name , String pattern , String flags ){
        RubyObject[] args = new RubyObject[2];
        args[0] = RubyString.newString(_runtime, pattern);
        args[1] = RubyString.newString(_runtime, flags);
        Object result = JavaEmbedUtils.invokeMethod(_runtime, _rbclsBSONRegex, "new", args, Object.class);
        RubySymbol rkey = RubySymbol.newSymbol(_runtime, "compile_regex");
        if (!_opts.containsKey(rkey) || (Boolean)_opts.get(rkey)) {
            result = JavaEmbedUtils.invokeMethod(_runtime, result, "try_compile", new Object[] {}, Object.class);
        }
        _put( name, (RubyObject)result );
    }

    /** Stores a Ruby String. */
    public void gotString( String name , String v ){
        RubyString str = RubyString.newString(_runtime, v);
        _put( name , str );
    }

    /** Stores a Ruby Symbol, interned through the runtime's symbol table. */
    public void gotSymbol( String name , String v ){
        ByteList bytes = new ByteList(v.getBytes());
        RubySymbol symbol = _runtime.getSymbolTable().getSymbol(bytes);
        _put( name , symbol );
    }

    /** Stores a BSON::Timestamp built from (seconds, increment). */
    public void gotTimestamp( String name , int time , int inc ){
        RubyFixnum rtime = RubyFixnum.newFixnum( _runtime, time );
        RubyFixnum rinc = RubyFixnum.newFixnum( _runtime, inc );
        RubyObject[] args = new RubyObject[2];
        args[0] = rtime;
        args[1] = rinc;
        Object result = JavaEmbedUtils.invokeMethod(_runtime, _rbclsTimestamp, "new", args, Object.class);
        _put ( name , (RubyObject)result );
    }

    /**
     * Stores a BSON::ObjectId. The 12 id bytes are passed to Ruby as an array
     * of unsigned byte values (hence the {@code & 0xFF} masking).
     */
    public void gotObjectId( String name , ObjectId id ){
        byte[] jbytes = id.toByteArray();
        RubyArray arg = RubyArray.newArray( _runtime, 12 );
        for( int i=0; i<jbytes.length; i++) {
            arg.store( i, _runtime.newFixnum(jbytes[i] & 0xFF) );
        }
        Object[] args = new Object[] { arg };
        Object result = JavaEmbedUtils.invokeMethod(_runtime, _rbclsObjectId, "new", args, Object.class);
        _put( name, (RubyObject)result );
    }

    /** DBRef decoding is not supported by this callback. */
    @Deprecated
    public void gotDBRef( String name , String ns , ObjectId id ){
        throw new UnsupportedOperationException();
    }

    /** Converts a Java byte[] to a RubyArray of (signed) Fixnum values. */
    private RubyArray ja2ra( byte[] b ) {
        RubyArray result = RubyArray.newArray( _runtime, b.length );
        for ( int i=0; i<b.length; i++ ) {
            result.store( i, _runtime.newFixnum(b[i]) );
        }
        return result;
    }

    @Deprecated
    public void gotBinaryArray( String name , byte[] b ) {
        throw new UnsupportedOperationException();
    }

    /**
     * Stores a BSON::Binary wrapping the data and subtype.
     * NOTE(review): Math.abs on the subtype byte collapses negative subtype
     * values (e.g. user-defined 0x80) — confirm this matches the Ruby driver.
     */
    public void gotBinary( String name , byte type , byte[] data ){
        RubyArray a = ja2ra( data );
        Object[] args = new Object[] { a, RubyFixnum.newFixnum(_runtime, Math.abs( type )) };
        Object result = JavaEmbedUtils.invokeMethod(_runtime, _rbclsBinary, "new", args, Object.class);
        _put( name, (RubyObject)result );
    }

    /**
     * Writes a value into the current container: by numeric index when it is an
     * array, otherwise via the Ruby []= method on the OrderedHash.
     */
    protected void _put( String name , RubyObject o ){
        RubyObject current = cur();
        if(current instanceof RubyArray) {
            RubyArray a = (RubyArray)current;
            Long n = Long.parseLong(name);
            a.store(n, (IRubyObject)o);
        }
        else {
            RubyString rkey = RubyString.newString(_runtime, name);
            JavaEmbedUtils.invokeMethod(_runtime, current, "[]=",
                new Object[] { (IRubyObject)rkey, o }, Object.class);
        }
    }

    /** Returns the container currently being filled. */
    protected RubyObject cur(){
        return _stack.getLast();
    }

    /** Returns the decoded root document (null before/after reset). */
    public Object get(){
        return _root;
    }

    /** Replaces the root document; used by subclasses. */
    protected void setRoot(RubyHash o) {
        _root = o;
    }

    /** True when no container is open. */
    protected boolean isStackEmpty() {
        return _stack.size() < 1;
    }
}
| |
/*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.sp.jobmanager.core;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.wso2.carbon.sp.jobmanager.core.appcreator.DeployableSiddhiQueryGroup;
import org.wso2.carbon.sp.jobmanager.core.appcreator.DistributedSiddhiQuery;
import org.wso2.carbon.sp.jobmanager.core.bean.ClusterConfig;
import org.wso2.carbon.sp.jobmanager.core.bean.DeploymentConfig;
import org.wso2.carbon.sp.jobmanager.core.bean.InterfaceConfig;
import org.wso2.carbon.sp.jobmanager.core.bean.StrategyConfig;
import org.wso2.carbon.sp.jobmanager.core.model.ManagerNode;
import org.wso2.carbon.sp.jobmanager.core.model.ManagerNodeConfig;
import org.wso2.carbon.sp.jobmanager.core.model.ResourceNode;
import org.wso2.carbon.sp.jobmanager.core.model.SiddhiAppHolder;
import org.wso2.carbon.sp.jobmanager.core.util.TypeConverter;
import java.util.ArrayList;
import java.util.List;
/**
 * Unit tests for the job-manager configuration beans and models: verifies
 * getters/setters, equals contracts, toString formats, and the
 * {@link TypeConverter} round-trips between bean and model types.
 */
public class BeanTestCase {

    /** Verifies ClusterConfig and its nested StrategyConfig expose what was set. */
    @Test
    public void testClusterConfigBean() {
        StrategyConfig strategyConfig = new StrategyConfig();
        strategyConfig.setDatasource("WSO2_DS");
        strategyConfig.setEventPollingInterval(500);
        strategyConfig.setHeartbeatInterval(1000);
        strategyConfig.setHeartbeatMaxRetry(3);
        ClusterConfig clusterConfig = new ClusterConfig();
        clusterConfig.setStrategyConfig(strategyConfig);
        clusterConfig.setCoordinationStrategyClass("distributed");
        clusterConfig.setEnabled(true);
        clusterConfig.setGroupId("group-1");
        Assert.assertEquals(clusterConfig.getCoordinationStrategyClass(), "distributed");
        Assert.assertEquals(clusterConfig.getGroupId(), "group-1");
        Assert.assertEquals(clusterConfig.getStrategyConfig().getDatasource(), "WSO2_DS");
        Assert.assertEquals(clusterConfig.getStrategyConfig().getEventPollingInterval(), 500);
        Assert.assertEquals(clusterConfig.getStrategyConfig().getHeartbeatInterval(), 1000);
        Assert.assertEquals(clusterConfig.getStrategyConfig().getHeartbeatMaxRetry(), 3);
        Assert.assertTrue(clusterConfig.isEnabled());
    }

    /** Verifies DeploymentConfig, InterfaceConfig equals(), and the bean<->model conversion. */
    @Test
    public void testDeploymentConfig() {
        InterfaceConfig interfaceConfig = new InterfaceConfig();
        interfaceConfig.setHost("localhost");
        interfaceConfig.setPort(9091);
        DeploymentConfig deploymentConfig = new DeploymentConfig();
        deploymentConfig.setBootstrapURLs("localhost:9090");
        deploymentConfig.setDatasource("WSO2_DS");
        deploymentConfig.setHeartbeatInterval(500);
        deploymentConfig.setHeartbeatMaxRetry(3);
        deploymentConfig.setMinResourceCount(2);
        deploymentConfig.setType("distributed");
        deploymentConfig.setHttpInterface(interfaceConfig);
        Assert.assertEquals(deploymentConfig.getBootstrapURLs(), "localhost:9090");
        Assert.assertEquals(deploymentConfig.getDatasource(), "WSO2_DS");
        Assert.assertEquals(deploymentConfig.getHeartbeatInterval(), 500);
        Assert.assertEquals(deploymentConfig.getHeartbeatMaxRetry(), 3);
        Assert.assertEquals(deploymentConfig.getMinResourceCount(), 2);
        Assert.assertEquals(deploymentConfig.getType(), "distributed");
        Assert.assertEquals(deploymentConfig.getHttpInterface().getHost(), "localhost");
        Assert.assertEquals(deploymentConfig.getHttpInterface().getPort(), 9091);
        Assert.assertEquals(deploymentConfig.getHttpInterface().toString(),
                "Interface { host: localhost, port: 9091 }");
        InterfaceConfig similarInterfaceConfig = new InterfaceConfig();
        similarInterfaceConfig.setHost("localhost");
        similarInterfaceConfig.setPort(9091);
        InterfaceConfig differentInterfaceConfig = new InterfaceConfig();
        differentInterfaceConfig.setHost("192.168.1.1");
        differentInterfaceConfig.setPort(9091);
        Assert.assertTrue(interfaceConfig.equals(similarInterfaceConfig));
        Assert.assertTrue(interfaceConfig.equals(interfaceConfig));
        Assert.assertFalse(interfaceConfig.equals(null));
        Assert.assertFalse(interfaceConfig.equals(new InterfaceConfig()));
        Assert.assertFalse(interfaceConfig.equals(differentInterfaceConfig));
        org.wso2.carbon.sp.jobmanager.core.model.InterfaceConfig iConfigModel = TypeConverter.convert(interfaceConfig);
        Assert.assertEquals(iConfigModel.getHost(), interfaceConfig.getHost());
        Assert.assertEquals(iConfigModel.getPort(), Integer.valueOf(interfaceConfig.getPort()));
        InterfaceConfig convertedInterfaceConfig = TypeConverter.convert(iConfigModel);
        Assert.assertEquals(convertedInterfaceConfig.getHost(), interfaceConfig.getHost());
        // BUGFIX: original compared convertedInterfaceConfig.getPort() to itself,
        // which is always true; compare against the source bean instead.
        Assert.assertEquals(convertedInterfaceConfig.getPort(), interfaceConfig.getPort());
    }

    /** Verifies DistributedSiddhiQuery and DeployableSiddhiQueryGroup equals behavior. */
    @Test
    public void testDistributedSiddhiQuery() {
        DeployableSiddhiQueryGroup deployableSiddhiQueryGroup = new DeployableSiddhiQueryGroup("group-1");
        DeployableSiddhiQueryGroup deployableSiddhiQueryGroup2 = new DeployableSiddhiQueryGroup("group-2");
        Assert.assertTrue(deployableSiddhiQueryGroup.equals(deployableSiddhiQueryGroup));
        Assert.assertFalse(deployableSiddhiQueryGroup.equals(null));
        Assert.assertFalse(deployableSiddhiQueryGroup.equals(deployableSiddhiQueryGroup2));
        List<DeployableSiddhiQueryGroup> queryGroups = new ArrayList<>();
        queryGroups.add(deployableSiddhiQueryGroup);
        queryGroups.add(deployableSiddhiQueryGroup2);
        DistributedSiddhiQuery distributedSiddhiQuery = new DistributedSiddhiQuery("app-1", queryGroups);
        Assert.assertEquals(distributedSiddhiQuery.getAppName(), "app-1");
        distributedSiddhiQuery.setAppName("app-2");
        Assert.assertEquals(distributedSiddhiQuery.getAppName(), "app-2");
        Assert.assertTrue(distributedSiddhiQuery.equals(distributedSiddhiQuery));
        Assert.assertFalse(distributedSiddhiQuery.equals(null));
        Assert.assertFalse(distributedSiddhiQuery.equals(new DistributedSiddhiQuery("app-2", null)));
        Assert.assertTrue(distributedSiddhiQuery.getQueryGroups().equals(queryGroups));
    }

    /** Verifies ManagerNode fluent setters, toString, and conversion to/from ManagerNodeConfig. */
    @Test
    public void testManagerNodeModel() {
        InterfaceConfig interfaceConfig = new InterfaceConfig();
        interfaceConfig.setHost("localhost");
        interfaceConfig.setPort(9091);
        ManagerNode managerNode = new ManagerNode();
        managerNode.setId("manager-1").setHeartbeatInterval(1000).setHeartbeatMaxRetry(3).
                setHttpInterface(interfaceConfig);
        Assert.assertEquals(managerNode.getId(), "manager-1");
        Assert.assertEquals(managerNode.getHeartbeatInterval(), 1000);
        Assert.assertEquals(managerNode.getHeartbeatMaxRetry(), 3);
        Assert.assertEquals(managerNode.getHttpInterface(), interfaceConfig);
        Assert.assertEquals(managerNode.toString(), "ManagerNode { id: manager-1, host: localhost, port: 9091 }");
        ManagerNodeConfig managerNodeConfig = TypeConverter.convert(managerNode);
        Assert.assertEquals(managerNodeConfig.getHeartbeatInterval(), Integer.valueOf(1000));
        Assert.assertEquals(managerNodeConfig.getHeartbeatMaxRetry(), Integer.valueOf(3));
        Assert.assertEquals(managerNodeConfig.getId(), "manager-1");
        Assert.assertEquals(managerNodeConfig.getHttpInterface().getHost(), interfaceConfig.getHost());
        Assert.assertEquals(managerNodeConfig.getHttpInterface().getPort(), Integer.valueOf(interfaceConfig.getPort()));
        ManagerNode convertedManagerNode = TypeConverter.convert(managerNodeConfig);
        Assert.assertEquals(convertedManagerNode.getId(), managerNode.getId());
    }

    /** Verifies ResourceNode state, ping-attempt counters, timestamps, and equals. */
    @Test
    public void testResourceNodeModel() {
        InterfaceConfig interfaceConfig = new InterfaceConfig();
        interfaceConfig.setHost("localhost");
        interfaceConfig.setPort(9091);
        ResourceNode resourceNode = new ResourceNode("resource-1");
        resourceNode.setHttpInterface(interfaceConfig);
        resourceNode.setState("NEW");
        Assert.assertEquals(resourceNode.getId(), "resource-1");
        resourceNode.setId("resource-2");
        Assert.assertEquals(resourceNode.getId(), "resource-2");
        Assert.assertEquals(resourceNode.getState(), "NEW");
        Assert.assertEquals(resourceNode.getHttpInterface(), interfaceConfig);
        Assert.assertEquals(resourceNode.getFailedPingAttempts(), 0);
        resourceNode.incrementFailedPingAttempts();
        Assert.assertEquals(resourceNode.getFailedPingAttempts(), 1);
        resourceNode.resetFailedPingAttempts();
        Assert.assertEquals(resourceNode.getFailedPingAttempts(), 0);
        long timestamp = System.currentTimeMillis();
        Assert.assertTrue(resourceNode.getLastPingTimestamp() <= timestamp);
        resourceNode.updateLastPingTimestamp();
        Assert.assertTrue(resourceNode.getLastPingTimestamp() >= timestamp);
        Assert.assertEquals(resourceNode.toString(), "ResourceNode { id: resource-2, host: localhost, port: 9091 }");
        Assert.assertTrue(resourceNode.equals(resourceNode));
        Assert.assertFalse(resourceNode.equals(null));
        Assert.assertFalse(resourceNode.equals(new ResourceNode("resource-3")));
    }

    /** Verifies SiddhiAppHolder accessors and its field-by-field equals implementation. */
    @Test
    public void testSiddhiAppHolderModel() {
        InterfaceConfig interfaceConfig = new InterfaceConfig();
        interfaceConfig.setHost("localhost");
        interfaceConfig.setPort(9091);
        ResourceNode resourceNode = new ResourceNode("resource-1");
        SiddhiAppHolder siddhiAppHolder = new SiddhiAppHolder(
                "parentAppName", "group-1", "app-1",
                "@App:name('app-1')", resourceNode);
        Assert.assertEquals(siddhiAppHolder.getParentAppName(), "parentAppName");
        Assert.assertEquals(siddhiAppHolder.getGroupName(), "group-1");
        Assert.assertEquals(siddhiAppHolder.getAppName(), "app-1");
        Assert.assertEquals(siddhiAppHolder.getSiddhiApp(), "@App:name('app-1')");
        Assert.assertEquals(siddhiAppHolder.getDeployedNode(), resourceNode);
        siddhiAppHolder.setParentAppName("parent2AppName");
        siddhiAppHolder.setGroupName("group-2");
        siddhiAppHolder.setAppName("app-2");
        siddhiAppHolder.setSiddhiApp("@App:name('app-2')");
        siddhiAppHolder.setDeployedNode(new ResourceNode("resource-2"));
        Assert.assertEquals(siddhiAppHolder.getParentAppName(), "parent2AppName");
        Assert.assertEquals(siddhiAppHolder.getGroupName(), "group-2");
        Assert.assertEquals(siddhiAppHolder.getAppName(), "app-2");
        Assert.assertEquals(siddhiAppHolder.getSiddhiApp(), "@App:name('app-2')");
        Assert.assertFalse(siddhiAppHolder.getDeployedNode().equals(resourceNode));
        Assert.assertFalse(siddhiAppHolder.equals(null));
        Assert.assertTrue(siddhiAppHolder.equals(siddhiAppHolder));
        Assert.assertFalse(siddhiAppHolder.equals(new SiddhiAppHolder(
                "parent2AppName", null, null, null, null)));
        Assert.assertFalse(siddhiAppHolder.equals(
                new SiddhiAppHolder("parent2AppName", "group-2", null, null,
                        null)));
        Assert.assertFalse(siddhiAppHolder.equals(new SiddhiAppHolder(
                "parent2AppName", "group-2", "app-2", null, null)));
        Assert.assertTrue(siddhiAppHolder.equals(new SiddhiAppHolder(
                "parent2AppName", "group-2", "app-2",
                "@App:name('app-2')", null)));
    }
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.analysis;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.devtools.build.lib.packages.RuleClass.Builder.RuleClassType.ABSTRACT;
import static com.google.devtools.build.lib.packages.RuleClass.Builder.RuleClassType.TEST;
import com.google.common.base.Preconditions;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.lib.analysis.buildinfo.BuildInfoFactory;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.analysis.config.BuildOptions;
import com.google.devtools.build.lib.analysis.config.ConfigurationFragmentFactory;
import com.google.devtools.build.lib.analysis.config.DefaultsPackage;
import com.google.devtools.build.lib.analysis.config.FragmentOptions;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.LabelSyntaxException;
import com.google.devtools.build.lib.cmdline.PackageIdentifier;
import com.google.devtools.build.lib.events.EventHandler;
import com.google.devtools.build.lib.graph.Digraph;
import com.google.devtools.build.lib.graph.Node;
import com.google.devtools.build.lib.packages.Attribute;
import com.google.devtools.build.lib.packages.NativeAspectClass;
import com.google.devtools.build.lib.packages.NonconfigurableAttributeMapper;
import com.google.devtools.build.lib.packages.OutputFile;
import com.google.devtools.build.lib.packages.Rule;
import com.google.devtools.build.lib.packages.RuleClass;
import com.google.devtools.build.lib.packages.RuleClassProvider;
import com.google.devtools.build.lib.packages.RuleErrorConsumer;
import com.google.devtools.build.lib.packages.Target;
import com.google.devtools.build.lib.rules.RuleConfiguredTargetFactory;
import com.google.devtools.build.lib.rules.SkylarkModules;
import com.google.devtools.build.lib.runtime.proto.InvocationPolicyOuterClass.InvocationPolicy;
import com.google.devtools.build.lib.syntax.Environment;
import com.google.devtools.build.lib.syntax.Environment.Extension;
import com.google.devtools.build.lib.syntax.Environment.Phase;
import com.google.devtools.build.lib.syntax.Mutability;
import com.google.devtools.build.lib.syntax.SkylarkUtils;
import com.google.devtools.build.lib.syntax.Type;
import com.google.devtools.common.options.OptionsClassProvider;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import javax.annotation.Nullable;
/**
* Knows about every rule Blaze supports and the associated configuration options.
*
* <p>This class is initialized on server startup and the set of rules, build info factories
* and configuration options is guarantees not to change over the life time of the Blaze server.
*/
public class ConfiguredRuleClassProvider implements RuleClassProvider {
/**
 * Custom dependency validation logic. Implementations are consulted for every
 * configured dependency edge during analysis.
 */
public interface PrerequisiteValidator {
    /**
     * Checks whether the rule in {@code contextBuilder} is allowed to depend on
     * {@code prerequisite} through the attribute {@code attribute}.
     *
     * <p>Can be used for enforcing any organization-specific policies about the layout of the
     * workspace.
     *
     * <p>Implementations report violations through {@code contextBuilder}
     * rather than by throwing.
     */
    void validate(
        RuleContext.Builder contextBuilder, ConfiguredTarget prerequisite, Attribute attribute);
}
/** Validator to check for and warn on the deprecation of dependencies. */
public static final class DeprecationValidator implements PrerequisiteValidator {
    /** Checks if the given prerequisite is deprecated and prints a warning if so. */
    @Override
    public void validate(
        RuleContext.Builder contextBuilder, ConfiguredTarget prerequisite, Attribute attribute) {
        validateDirectPrerequisiteForDeprecation(
            contextBuilder, contextBuilder.getRule(), prerequisite, contextBuilder.forAspect());
    }

    /**
     * Returns whether two packages are considered the same for purposes of deprecation warnings.
     * Dependencies within the same package do not print deprecation warnings; a package in the
     * javatests directory may also depend on its corresponding java package without a warning.
     */
    public static boolean isSameLogicalPackage(
        PackageIdentifier thisPackage, PackageIdentifier prerequisitePackage) {
        if (thisPackage.equals(prerequisitePackage)) {
            // If the packages are equal, they are the same logical package (and just the same package).
            return true;
        }
        if (!thisPackage.getRepository().equals(prerequisitePackage.getRepository())) {
            // If the packages are in different repositories, they are not the same logical package.
            return false;
        }
        // If the packages are in the same repository, it's allowed iff this package is the javatests
        // companion to the prerequisite java package.
        String thisPackagePath = thisPackage.getPackageFragment().getPathString();
        String prerequisitePackagePath = prerequisitePackage.getPackageFragment().getPathString();
        return thisPackagePath.startsWith("javatests/")
            && prerequisitePackagePath.startsWith("java/")
            && thisPackagePath.substring("javatests/".length()).equals(
                prerequisitePackagePath.substring("java/".length()));
    }

    /** Returns whether a deprecation warning should be printed for the prerequisite described. */
    private static boolean shouldEmitDeprecationWarningFor(
        String thisDeprecation, PackageIdentifier thisPackage,
        String prerequisiteDeprecation, PackageIdentifier prerequisitePackage,
        boolean forAspect) {
        // Don't report deprecation edges from javatests to java or within a package;
        // otherwise tests of deprecated code generate nuisance warnings.
        // Don't report deprecation if the current target is also deprecated,
        // or if the current context is evaluating an aspect,
        // as the base target would have already printed the deprecation warnings.
        return (!forAspect
            && prerequisiteDeprecation != null
            && !isSameLogicalPackage(thisPackage, prerequisitePackage)
            && thisDeprecation == null);
    }

    /** Checks if the given prerequisite is deprecated and prints a warning if so. */
    public static void validateDirectPrerequisiteForDeprecation(
        RuleErrorConsumer errors, Rule rule, ConfiguredTarget prerequisite, boolean forAspect) {
        Target prerequisiteTarget = prerequisite.getTarget();
        Label prerequisiteLabel = prerequisiteTarget.getLabel();
        PackageIdentifier thatPackage = prerequisiteLabel.getPackageIdentifier();
        PackageIdentifier thisPackage = rule.getLabel().getPackageIdentifier();
        if (prerequisiteTarget instanceof Rule) {
            Rule prerequisiteRule = (Rule) prerequisiteTarget;
            String thisDeprecation =
                NonconfigurableAttributeMapper.of(rule).get("deprecation", Type.STRING);
            String thatDeprecation =
                NonconfigurableAttributeMapper.of(prerequisiteRule).get("deprecation", Type.STRING);
            if (shouldEmitDeprecationWarningFor(
                thisDeprecation, thisPackage, thatDeprecation, thatPackage, forAspect)) {
                errors.ruleWarning("target '" + rule.getLabel() + "' depends on deprecated target '"
                    + prerequisiteLabel + "': " + thatDeprecation);
            }
        }
        // An output file inherits the deprecation of its generating rule.
        if (prerequisiteTarget instanceof OutputFile) {
            Rule generatingRule = ((OutputFile) prerequisiteTarget).getGeneratingRule();
            String thisDeprecation =
                NonconfigurableAttributeMapper.of(rule).get("deprecation", Type.STRING);
            String thatDeprecation =
                NonconfigurableAttributeMapper.of(generatingRule).get("deprecation", Type.STRING);
            if (shouldEmitDeprecationWarningFor(
                thisDeprecation, thisPackage, thatDeprecation, thatPackage, forAspect)) {
                // BUGFIX: the original message had unbalanced quotes
                // ("... of a deprecated rule X': ..."); quote both labels consistently.
                errors.ruleWarning("target '" + rule.getLabel() + "' depends on the output file '"
                    + prerequisiteLabel + "' of a deprecated rule '" + generatingRule.getLabel()
                    + "': " + thatDeprecation);
            }
        }
    }
}
/**
 * A coherent set of options, fragments, aspects and rules; each of these may declare a dependency
 * on other such sets.
 */
// Member interfaces are implicitly static; the redundant modifier is dropped.
public interface RuleSet {
    /** Add stuff to the configured rule class provider builder. */
    void init(ConfiguredRuleClassProvider.Builder builder);

    /** List of required modules. */
    ImmutableList<RuleSet> requires();
}
/** Builder for {@link ConfiguredRuleClassProvider}. */
public static class Builder implements RuleDefinitionEnvironment {
  private String productName;
  private final StringBuilder defaultWorkspaceFilePrefix = new StringBuilder();
  private final StringBuilder defaultWorkspaceFileSuffix = new StringBuilder();
  private Label preludeLabel;
  private String runfilesPrefix;
  private String toolsRepository;
  private final List<ConfigurationFragmentFactory> configurationFragmentFactories =
      new ArrayList<>();
  private final List<BuildInfoFactory> buildInfoFactories = new ArrayList<>();
  private final List<Class<? extends FragmentOptions>> configurationOptions = new ArrayList<>();
  private final Map<String, RuleClass> ruleClassMap = new HashMap<>();
  private final Map<String, RuleDefinition> ruleDefinitionMap = new HashMap<>();
  private final Map<String, NativeAspectClass> nativeAspectClassMap =
      new HashMap<>();
  private final Map<Class<? extends RuleDefinition>, RuleClass> ruleMap = new HashMap<>();
  // Edges run from a rule definition's ancestor to the definition itself, so a topological
  // walk of this graph visits ancestors before the definitions that extend them (see build()).
  private final Digraph<Class<? extends RuleDefinition>> dependencyGraph =
      new Digraph<>();
  private ConfigurationCollectionFactory configurationCollectionFactory;
  private Class<? extends BuildConfiguration.Fragment> universalFragment;
  private PrerequisiteValidator prerequisiteValidator;
  private ImmutableMap.Builder<String, Object> skylarkAccessibleTopLevels =
      ImmutableMap.builder();
  private ImmutableList.Builder<Class<?>> skylarkModules =
      ImmutableList.<Class<?>>builder().addAll(SkylarkModules.MODULES);
  private ImmutableBiMap.Builder<String, Class<? extends TransitiveInfoProvider>>
      registeredSkylarkProviders = ImmutableBiMap.builder();
  private Map<String, String> platformRegexps = new TreeMap<>();

  /** Sets the product name reported by the resulting provider. */
  public Builder setProductName(String productName) {
    this.productName = productName;
    return this;
  }

  /** Appends {@code contents} to the default WORKSPACE file prefix. */
  public void addWorkspaceFilePrefix(String contents) {
    defaultWorkspaceFilePrefix.append(contents);
  }

  /** Appends {@code contents} to the default WORKSPACE file suffix. */
  public void addWorkspaceFileSuffix(String contents) {
    defaultWorkspaceFileSuffix.append(contents);
  }

  /**
   * Sets the label of the prelude file.
   *
   * @throws IllegalArgumentException if {@code preludeLabelString} is not a valid absolute label
   */
  public Builder setPrelude(String preludeLabelString) {
    try {
      this.preludeLabel = Label.parseAbsolute(preludeLabelString);
    } catch (LabelSyntaxException e) {
      String errorMsg =
          String.format("Prelude label '%s' is invalid: %s", preludeLabelString, e.getMessage());
      // Chain the parse failure as the cause so callers can see the full context.
      throw new IllegalArgumentException(errorMsg, e);
    }
    return this;
  }

  /** Sets the default runfiles prefix. */
  public Builder setRunfilesPrefix(String runfilesPrefix) {
    this.runfilesPrefix = runfilesPrefix;
    return this;
  }

  /** Sets the path to the tools repository. */
  public Builder setToolsRepository(String toolsRepository) {
    this.toolsRepository = toolsRepository;
    return this;
  }

  /** Sets the validator applied to prerequisites of configured targets. */
  public Builder setPrerequisiteValidator(PrerequisiteValidator prerequisiteValidator) {
    this.prerequisiteValidator = prerequisiteValidator;
    return this;
  }

  /** Registers a build info factory. */
  public Builder addBuildInfoFactory(BuildInfoFactory factory) {
    buildInfoFactories.add(factory);
    return this;
  }

  /**
   * Registers a rule definition. Its ancestors are recorded in the dependency graph so that
   * {@link #build} commits ancestors first; every ancestor must itself be registered via this
   * method before {@link #build} is called.
   */
  public Builder addRuleDefinition(RuleDefinition ruleDefinition) {
    Class<? extends RuleDefinition> ruleDefinitionClass = ruleDefinition.getClass();
    ruleDefinitionMap.put(ruleDefinitionClass.getName(), ruleDefinition);
    dependencyGraph.createNode(ruleDefinitionClass);
    for (Class<? extends RuleDefinition> ancestor : ruleDefinition.getMetadata().ancestors()) {
      dependencyGraph.addEdge(ancestor, ruleDefinitionClass);
    }
    return this;
  }

  /** Registers a native aspect class, keyed by its name. */
  public Builder addNativeAspectClass(NativeAspectClass aspectFactoryClass) {
    nativeAspectClassMap.put(aspectFactoryClass.getName(), aspectFactoryClass);
    return this;
  }

  /** Registers a configuration options class. */
  public Builder addConfigurationOptions(Class<? extends FragmentOptions> configurationOptions) {
    this.configurationOptions.add(configurationOptions);
    return this;
  }

  /**
   * Adds an options class and a corresponding factory. There's usually a 1:1:1 correspondence
   * between option classes, factories, and fragments, such that the factory depends only on the
   * options class and creates the fragment. This method provides a convenient way of adding both
   * the options class and the factory in a single call.
   */
  public Builder addConfig(
      Class<? extends FragmentOptions> options, ConfigurationFragmentFactory factory) {
    // Enforce that the factory requires the options.
    Preconditions.checkState(factory.requiredOptions().contains(options));
    this.configurationOptions.add(options);
    this.configurationFragmentFactories.add(factory);
    return this;
  }

  /** Registers a collection of configuration options classes. */
  public Builder addConfigurationOptions(
      Collection<Class<? extends FragmentOptions>> optionsClasses) {
    this.configurationOptions.addAll(optionsClasses);
    return this;
  }

  /** Registers a configuration fragment factory. */
  public Builder addConfigurationFragment(ConfigurationFragmentFactory factory) {
    configurationFragmentFactories.add(factory);
    return this;
  }

  /** Sets the factory that creates the configuration collection. */
  public Builder setConfigurationCollectionFactory(ConfigurationCollectionFactory factory) {
    this.configurationCollectionFactory = factory;
    return this;
  }

  /** Sets the fragment made available to all rules even when they don't explicitly require it. */
  public Builder setUniversalConfigurationFragment(
      Class<? extends BuildConfiguration.Fragment> fragment) {
    this.universalFragment = fragment;
    return this;
  }

  /** Exposes {@code object} under {@code name} as a Skylark top-level binding. */
  public Builder addSkylarkAccessibleTopLevels(String name, Object object) {
    this.skylarkAccessibleTopLevels.put(name, object);
    return this;
  }

  /** Registers additional Skylark module classes. */
  public Builder addSkylarkModule(Class<?>... modules) {
    this.skylarkModules.add(modules);
    return this;
  }

  /**
   * Adds a mapping that determines which keys in structs returned by skylark rules should be
   * interpreted as native TransitiveInfoProvider instances of type (map value).
   */
  public Builder registerSkylarkProvider(
      String name, Class<? extends TransitiveInfoProvider> provider) {
    this.registeredSkylarkProviders.put(name, provider);
    return this;
  }

  /**
   * Do not use - this only exists for backwards compatibility! Platform regexps are part of a
   * legacy mechanism - {@code vardef} - that is not exposed in Bazel.
   *
   * <p>{@code vardef} needs explicit support in the rule implementations, and cannot express
   * conditional dependencies, only conditional attribute values. This mechanism will be
   * supplanted by configuration dependent attributes, and its effect can usually also be achieved
   * with select().
   *
   * <p>This is a map of platform names to regexps. When a name is used as the third argument to
   * {@code vardef}, the corresponding regexp is used to match on the C++ abi, and the variable is
   * only set to that value if the regexp matches. For example, the entry
   * {@code "oldlinux": "i[34]86-libc[345]-linux"} might define a set of platforms representing
   * certain older linux releases.
   */
  public Builder addPlatformRegexps(Map<String, String> platformRegexps) {
    this.platformRegexps.putAll(Preconditions.checkNotNull(platformRegexps));
    return this;
  }

  /** Instantiates {@code factoryClass} via its public no-argument constructor. */
  private RuleConfiguredTargetFactory createFactory(
      Class<? extends RuleConfiguredTargetFactory> factoryClass) {
    try {
      Constructor<? extends RuleConfiguredTargetFactory> ctor = factoryClass.getConstructor();
      return ctor.newInstance();
    } catch (NoSuchMethodException | IllegalAccessException | InstantiationException
        | InvocationTargetException e) {
      // A factory class that cannot be reflectively constructed is a programming error.
      throw new IllegalStateException(e);
    }
  }

  /**
   * Builds the {@link RuleClass} for {@code definitionClass} and records it in this builder's
   * maps. All of the definition's ancestors must already have been committed.
   */
  private RuleClass commitRuleDefinition(Class<? extends RuleDefinition> definitionClass) {
    RuleDefinition instance = checkNotNull(ruleDefinitionMap.get(definitionClass.getName()),
        "addRuleDefinition(new %s()) should be called before build()", definitionClass.getName());

    RuleDefinition.Metadata metadata = instance.getMetadata();
    checkArgument(ruleClassMap.get(metadata.name()) == null, metadata.name());

    List<Class<? extends RuleDefinition>> ancestors = metadata.ancestors();

    // Exactly one of: the definition is abstract, or it supplies a concrete factory class.
    checkArgument(
        metadata.type() == ABSTRACT ^ metadata.factoryClass()
        != RuleConfiguredTargetFactory.class);
    // Test rules must inherit from the test base rule.
    checkArgument(
        (metadata.type() != TEST)
        || ancestors.contains(BaseRuleClasses.TestBaseRule.class));

    RuleClass[] ancestorClasses = new RuleClass[ancestors.size()];
    for (int i = 0; i < ancestorClasses.length; i++) {
      ancestorClasses[i] = ruleMap.get(ancestors.get(i));
      if (ancestorClasses[i] == null) {
        // Ancestors should have been initialized by now
        throw new IllegalStateException("Ancestor " + ancestors.get(i) + " of "
            + metadata.name() + " is not initialized");
      }
    }

    RuleConfiguredTargetFactory factory = null;
    if (metadata.type() != ABSTRACT) {
      factory = createFactory(metadata.factoryClass());
    }

    RuleClass.Builder builder = new RuleClass.Builder(
        metadata.name(), metadata.type(), false, ancestorClasses);
    builder.factory(factory);
    RuleClass ruleClass = instance.build(builder, this);
    ruleMap.put(definitionClass, ruleClass);
    ruleClassMap.put(ruleClass.getName(), ruleClass);
    ruleDefinitionMap.put(ruleClass.getName(), instance);

    return ruleClass;
  }

  /** Commits all registered rule definitions in dependency order and builds the provider. */
  public ConfiguredRuleClassProvider build() {
    for (Node<Class<? extends RuleDefinition>> ruleDefinition :
        dependencyGraph.getTopologicalOrder()) {
      commitRuleDefinition(ruleDefinition.getLabel());
    }

    return new ConfiguredRuleClassProvider(
        productName,
        preludeLabel,
        runfilesPrefix,
        toolsRepository,
        ImmutableMap.copyOf(ruleClassMap),
        ImmutableMap.copyOf(ruleDefinitionMap),
        ImmutableMap.copyOf(nativeAspectClassMap),
        defaultWorkspaceFilePrefix.toString(),
        defaultWorkspaceFileSuffix.toString(),
        ImmutableList.copyOf(buildInfoFactories),
        ImmutableList.copyOf(configurationOptions),
        ImmutableList.copyOf(configurationFragmentFactories),
        configurationCollectionFactory,
        universalFragment,
        prerequisiteValidator,
        skylarkAccessibleTopLevels.build(),
        skylarkModules.build(),
        registeredSkylarkProviders.build());
  }

  @Override
  public Label getLabel(String labelValue) {
    return LABELS.getUnchecked(labelValue);
  }

  @Override
  public Label getToolsLabel(String labelValue) {
    return getLabel(toolsRepository + labelValue);
  }

  @Override
  public String getToolsRepository() {
    return toolsRepository;
  }

  /** Returns the registered platform regexps, or null if none were added. */
  @Nullable
  public Map<String, String> getPlatformRegexps() {
    return platformRegexps.isEmpty() ? null : ImmutableMap.copyOf(platformRegexps);
  }
}
/**
 * Interns {@link Label} instances by their string form, so that repeated lookups of the same
 * label value (e.g. across many rules) reuse a single instance instead of re-parsing.
 */
private static final LoadingCache<String, Label> LABELS = CacheBuilder.newBuilder().build(
    new CacheLoader<String, Label>() {
      @Override
      public Label load(String from) {
        try {
          return Label.parseAbsolute(from);
        } catch (LabelSyntaxException e) {
          // Accessed via getUnchecked, so signal bad input with an unchecked exception,
          // preserving the parse failure as the cause.
          throw new IllegalArgumentException(from, e);
        }
      }
    });
/** Product name reported by {@link #getProductName}. */
private final String productName;

/**
 * Default content that should be added at the beginning of the WORKSPACE file.
 */
private final String defaultWorkspaceFilePrefix;

/**
 * Default content that should be added at the end of the WORKSPACE file.
 */
private final String defaultWorkspaceFileSuffix;

/**
 * Label for the prelude file.
 */
private final Label preludeLabel;

/**
 * The default runfiles prefix.
 */
private final String runfilesPrefix;

/**
 * The path to the tools repository.
 */
private final String toolsRepository;

/**
 * Maps rule class name to the metaclass instance for that rule.
 */
private final ImmutableMap<String, RuleClass> ruleClassMap;

/**
 * Maps rule class name to the rule definition objects.
 */
private final ImmutableMap<String, RuleDefinition> ruleDefinitionMap;

/**
 * Maps aspect name to the aspect factory meta class.
 */
private final ImmutableMap<String, NativeAspectClass> nativeAspectClassMap;

/**
 * The configuration options that affect the behavior of the rules.
 */
private final ImmutableList<Class<? extends FragmentOptions>> configurationOptions;

/** The set of configuration fragment factories. */
private final ImmutableList<ConfigurationFragmentFactory> configurationFragmentFactories;

/**
 * The factory that creates the configuration collection.
 */
private final ConfigurationCollectionFactory configurationCollectionFactory;

/**
 * A configuration fragment that should be available to all rules even when they don't
 * explicitly require it.
 */
private final Class<? extends BuildConfiguration.Fragment> universalFragment;

/** Build info factories registered for the supported languages. */
private final ImmutableList<BuildInfoFactory> buildInfoFactories;

/** Validator applied to prerequisites of configured targets. */
private final PrerequisiteValidator prerequisiteValidator;

/** Skylark global bindings, built once in the constructor from modules and top-level objects. */
private final Environment.Frame globals;

/** Maps struct field names to the native TransitiveInfoProvider classes they represent. */
private final ImmutableBiMap<String, Class<? extends TransitiveInfoProvider>>
    registeredSkylarkProviders;
/**
 * Constructs the provider from fully-built collections; invoked only by {@link Builder#build}.
 * Parameters are stored directly in the identically-named fields, except for the Skylark
 * top-levels and modules, which are folded into the {@code globals} frame.
 */
private ConfiguredRuleClassProvider(
    String productName,
    Label preludeLabel,
    String runfilesPrefix,
    String toolsRepository,
    ImmutableMap<String, RuleClass> ruleClassMap,
    ImmutableMap<String, RuleDefinition> ruleDefinitionMap,
    ImmutableMap<String, NativeAspectClass> nativeAspectClassMap,
    String defaultWorkspaceFilePrefix,
    String defaultWorkspaceFileSuffix,
    ImmutableList<BuildInfoFactory> buildInfoFactories,
    ImmutableList<Class<? extends FragmentOptions>> configurationOptions,
    ImmutableList<ConfigurationFragmentFactory> configurationFragments,
    ConfigurationCollectionFactory configurationCollectionFactory,
    Class<? extends BuildConfiguration.Fragment> universalFragment,
    PrerequisiteValidator prerequisiteValidator,
    ImmutableMap<String, Object> skylarkAccessibleJavaClasses,
    ImmutableList<Class<?>> skylarkModules,
    ImmutableBiMap<String, Class<? extends TransitiveInfoProvider>> registeredSkylarkProviders) {
  this.productName = productName;
  this.preludeLabel = preludeLabel;
  this.runfilesPrefix = runfilesPrefix;
  this.toolsRepository = toolsRepository;
  this.ruleClassMap = ruleClassMap;
  this.ruleDefinitionMap = ruleDefinitionMap;
  this.nativeAspectClassMap = nativeAspectClassMap;
  this.defaultWorkspaceFilePrefix = defaultWorkspaceFilePrefix;
  this.defaultWorkspaceFileSuffix = defaultWorkspaceFileSuffix;
  this.buildInfoFactories = buildInfoFactories;
  this.configurationOptions = configurationOptions;
  this.configurationFragmentFactories = configurationFragments;
  this.configurationCollectionFactory = configurationCollectionFactory;
  this.universalFragment = universalFragment;
  this.prerequisiteValidator = prerequisiteValidator;
  // Materialize the Skylark environment once so every rule-class environment can share it.
  this.globals = createGlobals(skylarkAccessibleJavaClasses, skylarkModules);
  this.registeredSkylarkProviders = registeredSkylarkProviders;
}
/** Returns the product name. */
public String getProductName() {
  return productName;
}

/** Returns the validator applied to prerequisites of configured targets. */
public PrerequisiteValidator getPrerequisiteValidator() {
  return prerequisiteValidator;
}

@Override
public Label getPreludeLabel() {
  return preludeLabel;
}

@Override
public String getRunfilesPrefix() {
  return runfilesPrefix;
}

@Override
public String getToolsRepository() {
  return toolsRepository;
}

@Override
public Map<String, RuleClass> getRuleClassMap() {
  return ruleClassMap;
}

@Override
public Map<String, NativeAspectClass> getNativeAspectClassMap() {
  return nativeAspectClassMap;
}

@Override
public NativeAspectClass getNativeAspectClass(String key) {
  return nativeAspectClassMap.get(key);
}

/**
 * Returns a list of build info factories that are needed for the supported languages.
 */
public ImmutableList<BuildInfoFactory> getBuildInfoFactories() {
  return buildInfoFactories;
}

/**
 * Returns the set of configuration fragments provided by this module.
 */
public ImmutableList<ConfigurationFragmentFactory> getConfigurationFragments() {
  return configurationFragmentFactories;
}

/**
 * Returns the set of configuration options that are supported in this module.
 */
public ImmutableList<Class<? extends FragmentOptions>> getConfigurationOptions() {
  return configurationOptions;
}

/**
 * Returns the definition of the rule class definition with the specified name,
 * or null if no rule class with that name is registered.
 */
public RuleDefinition getRuleClassDefinition(String ruleClassName) {
  return ruleDefinitionMap.get(ruleClassName);
}

/**
 * Returns the configuration collection creator.
 */
public ConfigurationCollectionFactory getConfigurationCollectionFactory() {
  return configurationCollectionFactory;
}

/**
 * Returns the configuration fragment that should be available to all rules even when they
 * don't explicitly require it.
 */
public Class<? extends BuildConfiguration.Fragment> getUniversalFragment() {
  return universalFragment;
}

/**
 * Returns the defaults package content for the default settings, honoring the given
 * invocation policy.
 */
public String getDefaultsPackageContent(InvocationPolicy invocationPolicy) {
  return DefaultsPackage.getDefaultsPackageContent(configurationOptions, invocationPolicy);
}

/**
 * Returns the defaults package for the given options taken from an optionsProvider.
 */
public String getDefaultsPackageContent(OptionsClassProvider optionsProvider) {
  return DefaultsPackage.getDefaultsPackageContent(
      BuildOptions.of(configurationOptions, optionsProvider));
}

/**
 * Returns a map that indicates which keys in structs returned by skylark rules should be
 * interpreted as native TransitiveInfoProvider instances of type (map value).
 *
 * <p>That is, if this map contains "dummy" -> DummyProvider.class, a "dummy" entry in a skylark
 * rule implementation's returned struct will be exported from that ConfiguredTarget as a
 * DummyProvider.
 */
public ImmutableBiMap<String, Class<? extends TransitiveInfoProvider>>
    getRegisteredSkylarkProviders() {
  return this.registeredSkylarkProviders;
}

/**
 * Creates a BuildOptions class for the given options taken from an optionsProvider.
 */
public BuildOptions createBuildOptions(OptionsClassProvider optionsProvider) {
  return BuildOptions.of(configurationOptions, optionsProvider);
}
/**
 * Creates the frame of Skylark global bindings: the standard bindings contributed by
 * {@code modules}, plus each registered top-level name-to-object binding.
 *
 * @param skylarkAccessibleTopLevels top-level bindings to install into the environment
 * @param modules Skylark module classes whose globals seed the environment
 * @return the frame of global bindings from the temporary environment
 */
private Environment.Frame createGlobals(
    ImmutableMap<String, Object> skylarkAccessibleTopLevels,
    ImmutableList<Class<?>> modules) {
  // The mutability is closed on exit, after which the returned frame can no longer be mutated.
  try (Mutability mutability = Mutability.create("ConfiguredRuleClassProvider globals")) {
    Environment env = createSkylarkRuleClassEnvironment(
        mutability, SkylarkModules.getGlobals(modules), null, null, null);
    for (Map.Entry<String, Object> entry : skylarkAccessibleTopLevels.entrySet()) {
      env.setup(entry.getKey(), entry.getValue());
    }
    return env.getGlobals();
  }
}
/**
 * Builds a Skylark environment in the loading phase with the given globals, event handler,
 * AST hash, and imported extensions, and points it at this provider's tools repository.
 *
 * @param mutability controls whether/when the environment may be mutated
 * @param globals the frame of global bindings to expose
 * @param eventHandler receiver for events; may be null (see callers)
 * @param astFileContentHashCode hash of the file contents backing the AST; may be null
 * @param importMap extensions available for load(); may be null
 */
private Environment createSkylarkRuleClassEnvironment(
    Mutability mutability,
    Environment.Frame globals,
    EventHandler eventHandler,
    String astFileContentHashCode,
    Map<String, Extension> importMap) {
  Environment env =
      Environment.builder(mutability)
          .setSkylark()
          .setGlobals(globals)
          .setEventHandler(eventHandler)
          .setFileContentHashCode(astFileContentHashCode)
          .setImportedExtensions(importMap)
          .setPhase(Phase.LOADING)
          .build();
  SkylarkUtils.setToolsRepository(env, toolsRepository);
  return env;
}
/**
 * {@inheritDoc}
 *
 * <p>Delegates to the private overload, using this provider's shared globals frame with its
 * label set to {@code extensionLabel}.
 */
@Override
public Environment createSkylarkRuleClassEnvironment(
    Label extensionLabel, Mutability mutability,
    EventHandler eventHandler,
    String astFileContentHashCode,
    Map<String, Extension> importMap) {
  return createSkylarkRuleClassEnvironment(
      mutability, globals.setLabel(extensionLabel),
      eventHandler, astFileContentHashCode, importMap);
}
/** Returns the default content placed at the beginning of the WORKSPACE file. */
@Override
public String getDefaultWorkspacePrefix() {
  return defaultWorkspaceFilePrefix;
}

/** Returns the default content placed at the end of the WORKSPACE file. */
@Override
public String getDefaultWorkspaceSuffix() {
  return defaultWorkspaceFileSuffix;
}
/**
 * Returns every registered {@link BuildConfiguration.Fragment} class: one per configuration
 * fragment factory, plus the universal fragment.
 */
public Set<Class<? extends BuildConfiguration.Fragment>> getAllFragments() {
  ImmutableSet.Builder<Class<? extends BuildConfiguration.Fragment>> allFragments =
      ImmutableSet.builder();
  for (ConfigurationFragmentFactory fragmentFactory : getConfigurationFragments()) {
    allFragments.add(fragmentFactory.creates());
  }
  allFragments.add(getUniversalFragment());
  return allFragments.build();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id$ */
package org.apache.fop.complexscripts.fonts;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.fop.complexscripts.scripts.ScriptProcessor;
import org.apache.fop.complexscripts.util.GlyphSequence;
// CSOFF: LineLengthCheck
/**
* <p>The <code>GlyphDefinitionTable</code> class is a glyph table that implements
* glyph definition functionality according to the OpenType GDEF table.</p>
*
* <p>This work was originally authored by Glenn Adams (gadams@apache.org).</p>
*/
public class GlyphDefinitionTable extends GlyphTable {

    /** logging instance */
    private static final Log log = LogFactory.getLog(GlyphDefinitionTable.class);

    /** glyph class subtable type */
    public static final int GDEF_LOOKUP_TYPE_GLYPH_CLASS = 1;

    /** attachment point subtable type */
    public static final int GDEF_LOOKUP_TYPE_ATTACHMENT_POINT = 2;

    /** ligature caret subtable type */
    public static final int GDEF_LOOKUP_TYPE_LIGATURE_CARET = 3;

    /** mark attachment subtable type */
    public static final int GDEF_LOOKUP_TYPE_MARK_ATTACHMENT = 4;

    /** pre-defined glyph class - base glyph */
    public static final int GLYPH_CLASS_BASE = 1;

    /** pre-defined glyph class - ligature glyph */
    public static final int GLYPH_CLASS_LIGATURE = 2;

    /** pre-defined glyph class - mark glyph */
    public static final int GLYPH_CLASS_MARK = 3;

    /** pre-defined glyph class - component glyph */
    public static final int GLYPH_CLASS_COMPONENT = 4;

    /** singleton glyph class table */
    private GlyphClassSubtable gct;

    /** singleton attachment point table */
    // private AttachmentPointSubtable apt; // NOT YET USED

    /** singleton ligature caret table */
    // private LigatureCaretSubtable lct; // NOT YET USED

    /** singleton mark attachment table */
    private MarkAttachmentSubtable mat;

    /**
     * Instantiate a <code>GlyphDefinitionTable</code> object using the specified subtables.
     * @param subtables a list of identified subtables
     * @param processors a map from script identifiers to script processors
     * @throws AdvancedTypographicTableFormatException if subtables is null or empty, or if any
     * entry is not a glyph definition subtable
     */
    public GlyphDefinitionTable(List subtables, Map<String, ScriptProcessor> processors) {
        super(null, new HashMap(0), processors);
        if ((subtables == null) || (subtables.size() == 0)) {
            throw new AdvancedTypographicTableFormatException("subtables must be non-empty");
        } else {
            for (Object o : subtables) {
                if (o instanceof GlyphDefinitionSubtable) {
                    addSubtable((GlyphSubtable) o);
                } else {
                    throw new AdvancedTypographicTableFormatException("subtable must be a glyph definition subtable");
                }
            }
            freezeSubtables();
        }
    }

    /**
     * Reorder combining marks in glyph sequence so that they precede (within the sequence) the base
     * character to which they are applied. N.B. In the case of LTR segments, marks are not reordered by this
     * method, since when the segment is reversed by BIDI processing, marks are automatically reordered to precede
     * their base glyph.
     * @param gs an input glyph sequence
     * @param widths associated advance widths (also reordered)
     * @param gpa associated glyph position adjustments (also reordered)
     * @param script a script identifier
     * @param language a language identifier
     * @param isVertical whether the sequence is laid out in vertical writing mode (passed through
     * to script processor selection)
     * @return the reordered (output) glyph sequence
     */
    public GlyphSequence reorderCombiningMarks(GlyphSequence gs, int[] widths, int[][] gpa, String script, String language, boolean isVertical) {
        ScriptProcessor sp = ScriptProcessor.getInstance(script, processors, isVertical);
        return sp.reorderCombiningMarks(this, gs, widths, gpa, script, language);
    }

    /** {@inheritDoc} */
    protected void addSubtable(GlyphSubtable subtable) {
        if (subtable instanceof GlyphClassSubtable) {
            this.gct = (GlyphClassSubtable) subtable;
        } else if (subtable instanceof AttachmentPointSubtable) {
            // TODO - not yet used
            // this.apt = (AttachmentPointSubtable) subtable;
        } else if (subtable instanceof LigatureCaretSubtable) {
            // TODO - not yet used
            // this.lct = (LigatureCaretSubtable) subtable;
        } else if (subtable instanceof MarkAttachmentSubtable) {
            this.mat = (MarkAttachmentSubtable) subtable;
        } else {
            throw new UnsupportedOperationException("unsupported glyph definition subtable type: " + subtable);
        }
    }

    /**
     * Determine if glyph belongs to pre-defined glyph class.
     * @param gid a glyph identifier (index)
     * @param gc a pre-defined glyph class (GLYPH_CLASS_BASE|GLYPH_CLASS_LIGATURE|GLYPH_CLASS_MARK|GLYPH_CLASS_COMPONENT).
     * @return true if glyph belongs to specified glyph class; false if no glyph class subtable is present
     */
    public boolean isGlyphClass(int gid, int gc) {
        if (gct != null) {
            return gct.isGlyphClass(gid, gc);
        } else {
            return false;
        }
    }

    /**
     * Determine glyph class.
     * @param gid a glyph identifier (index)
     * @return a pre-defined glyph class (GLYPH_CLASS_BASE|GLYPH_CLASS_LIGATURE|GLYPH_CLASS_MARK|GLYPH_CLASS_COMPONENT),
     * or -1 if no glyph class subtable is present
     */
    public int getGlyphClass(int gid) {
        if (gct != null) {
            return gct.getGlyphClass(gid);
        } else {
            return -1;
        }
    }

    /**
     * Determine if glyph belongs to (font specific) mark attachment class.
     * @param gid a glyph identifier (index)
     * @param mac a (font specific) mark attachment class
     * @return true if glyph belongs to specified mark attachment class; false if no mark attachment subtable is present
     */
    public boolean isMarkAttachClass(int gid, int mac) {
        if (mat != null) {
            return mat.isMarkAttachClass(gid, mac);
        } else {
            return false;
        }
    }

    /**
     * Determine mark attachment class.
     * @param gid a glyph identifier (index)
     * @return a non-negative mark attachment class, or -1 if no class defined
     */
    public int getMarkAttachClass(int gid) {
        if (mat != null) {
            return mat.getMarkAttachClass(gid);
        } else {
            return -1;
        }
    }

    /**
     * Map a lookup type name to its constant (integer) value.
     * @param name lookup type name
     * @return lookup type, or -1 if the name is not recognized
     */
    public static int getLookupTypeFromName(String name) {
        int t;
        // Use a locale-independent lowercase: the names are fixed ASCII keywords, and the
        // default locale (e.g. Turkish dotless-i) must not affect matching.
        String s = name.toLowerCase(Locale.ROOT);
        if ("glyphclass".equals(s)) {
            t = GDEF_LOOKUP_TYPE_GLYPH_CLASS;
        } else if ("attachmentpoint".equals(s)) {
            t = GDEF_LOOKUP_TYPE_ATTACHMENT_POINT;
        } else if ("ligaturecaret".equals(s)) {
            t = GDEF_LOOKUP_TYPE_LIGATURE_CARET;
        } else if ("markattachment".equals(s)) {
            t = GDEF_LOOKUP_TYPE_MARK_ATTACHMENT;
        } else {
            t = -1;
        }
        return t;
    }

    /**
     * Map a lookup type constant (integer) value to its name.
     * @param type lookup type
     * @return lookup type name, or "unknown" if the type is not recognized
     */
    public static String getLookupTypeName(int type) {
        String tn = null;
        switch (type) {
        case GDEF_LOOKUP_TYPE_GLYPH_CLASS:
            tn = "glyphclass";
            break;
        case GDEF_LOOKUP_TYPE_ATTACHMENT_POINT:
            tn = "attachmentpoint";
            break;
        case GDEF_LOOKUP_TYPE_LIGATURE_CARET:
            tn = "ligaturecaret";
            break;
        case GDEF_LOOKUP_TYPE_MARK_ATTACHMENT:
            tn = "markattachment";
            break;
        default:
            tn = "unknown";
            break;
        }
        return tn;
    }

    /**
     * Create a definition subtable according to the specified arguments.
     * @param type subtable type
     * @param id subtable identifier
     * @param sequence subtable sequence
     * @param flags subtable flags (must be zero)
     * @param format subtable format
     * @param mapping subtable mapping table
     * @param entries subtable entries
     * @return a glyph subtable instance, or null if the type is not recognized
     */
    public static GlyphSubtable createSubtable(int type, String id, int sequence, int flags, int format, GlyphMappingTable mapping, List entries) {
        GlyphSubtable st = null;
        switch (type) {
        case GDEF_LOOKUP_TYPE_GLYPH_CLASS:
            st = GlyphClassSubtable.create(id, sequence, flags, format, mapping, entries);
            break;
        case GDEF_LOOKUP_TYPE_ATTACHMENT_POINT:
            st = AttachmentPointSubtable.create(id, sequence, flags, format, mapping, entries);
            break;
        case GDEF_LOOKUP_TYPE_LIGATURE_CARET:
            st = LigatureCaretSubtable.create(id, sequence, flags, format, mapping, entries);
            break;
        case GDEF_LOOKUP_TYPE_MARK_ATTACHMENT:
            st = MarkAttachmentSubtable.create(id, sequence, flags, format, mapping, entries);
            break;
        default:
            break;
        }
        return st;
    }

    private abstract static class GlyphClassSubtable extends GlyphDefinitionSubtable {
        GlyphClassSubtable(String id, int sequence, int flags, int format, GlyphMappingTable mapping, List entries) {
            super(id, sequence, flags, format, mapping);
        }

        /** {@inheritDoc} */
        public int getType() {
            return GDEF_LOOKUP_TYPE_GLYPH_CLASS;
        }

        /**
         * Determine if glyph belongs to pre-defined glyph class.
         * @param gid a glyph identifier (index)
         * @param gc a pre-defined glyph class (GLYPH_CLASS_BASE|GLYPH_CLASS_LIGATURE|GLYPH_CLASS_MARK|GLYPH_CLASS_COMPONENT).
         * @return true if glyph belongs to specified glyph class
         */
        public abstract boolean isGlyphClass(int gid, int gc);

        /**
         * Determine glyph class.
         * @param gid a glyph identifier (index)
         * @return a pre-defined glyph class (GLYPH_CLASS_BASE|GLYPH_CLASS_LIGATURE|GLYPH_CLASS_MARK|GLYPH_CLASS_COMPONENT).
         */
        public abstract int getGlyphClass(int gid);

        static GlyphDefinitionSubtable create(String id, int sequence, int flags, int format, GlyphMappingTable mapping, List entries) {
            if (format == 1) {
                return new GlyphClassSubtableFormat1(id, sequence, flags, format, mapping, entries);
            } else {
                // Only format 1 glyph class subtables are supported.
                throw new UnsupportedOperationException();
            }
        }
    }

    private static class GlyphClassSubtableFormat1 extends GlyphClassSubtable {
        GlyphClassSubtableFormat1(String id, int sequence, int flags, int format, GlyphMappingTable mapping, List entries) {
            super(id, sequence, flags, format, mapping, entries);
        }

        /** {@inheritDoc} */
        public List getEntries() {
            return null;
        }

        /** {@inheritDoc} */
        public boolean isCompatible(GlyphSubtable subtable) {
            return subtable instanceof GlyphClassSubtable;
        }

        /** {@inheritDoc} */
        public boolean isGlyphClass(int gid, int gc) {
            GlyphClassMapping cm = getClasses();
            if (cm != null) {
                return cm.getClassIndex(gid, 0) == gc;
            } else {
                return false;
            }
        }

        /** {@inheritDoc} */
        public int getGlyphClass(int gid) {
            GlyphClassMapping cm = getClasses();
            if (cm != null) {
                return cm.getClassIndex(gid, 0);
            } else {
                return -1;
            }
        }
    }

    private abstract static class AttachmentPointSubtable extends GlyphDefinitionSubtable {
        AttachmentPointSubtable(String id, int sequence, int flags, int format, GlyphMappingTable mapping, List entries) {
            super(id, sequence, flags, format, mapping);
        }

        /** {@inheritDoc} */
        public int getType() {
            return GDEF_LOOKUP_TYPE_ATTACHMENT_POINT;
        }

        static GlyphDefinitionSubtable create(String id, int sequence, int flags, int format, GlyphMappingTable mapping, List entries) {
            if (format == 1) {
                return new AttachmentPointSubtableFormat1(id, sequence, flags, format, mapping, entries);
            } else {
                // Only format 1 attachment point subtables are supported.
                throw new UnsupportedOperationException();
            }
        }
    }

    private static class AttachmentPointSubtableFormat1 extends AttachmentPointSubtable {
        AttachmentPointSubtableFormat1(String id, int sequence, int flags, int format, GlyphMappingTable mapping, List entries) {
            super(id, sequence, flags, format, mapping, entries);
        }

        /** {@inheritDoc} */
        public List getEntries() {
            return null;
        }

        /** {@inheritDoc} */
        public boolean isCompatible(GlyphSubtable subtable) {
            return subtable instanceof AttachmentPointSubtable;
        }
    }

    private abstract static class LigatureCaretSubtable extends GlyphDefinitionSubtable {
        LigatureCaretSubtable(String id, int sequence, int flags, int format, GlyphMappingTable mapping, List entries) {
            super(id, sequence, flags, format, mapping);
        }

        /** {@inheritDoc} */
        public int getType() {
            return GDEF_LOOKUP_TYPE_LIGATURE_CARET;
        }

        static GlyphDefinitionSubtable create(String id, int sequence, int flags, int format, GlyphMappingTable mapping, List entries) {
            if (format == 1) {
                return new LigatureCaretSubtableFormat1(id, sequence, flags, format, mapping, entries);
            } else {
                // Only format 1 ligature caret subtables are supported.
                throw new UnsupportedOperationException();
            }
        }
    }

    private static class LigatureCaretSubtableFormat1 extends LigatureCaretSubtable {
        LigatureCaretSubtableFormat1(String id, int sequence, int flags, int format, GlyphMappingTable mapping, List entries) {
            super(id, sequence, flags, format, mapping, entries);
        }

        /** {@inheritDoc} */
        public List getEntries() {
            return null;
        }

        /** {@inheritDoc} */
        public boolean isCompatible(GlyphSubtable subtable) {
            return subtable instanceof LigatureCaretSubtable;
        }
    }

    private abstract static class MarkAttachmentSubtable extends GlyphDefinitionSubtable {
        MarkAttachmentSubtable(String id, int sequence, int flags, int format, GlyphMappingTable mapping, List entries) {
            super(id, sequence, flags, format, mapping);
        }

        /** {@inheritDoc} */
        public int getType() {
            return GDEF_LOOKUP_TYPE_MARK_ATTACHMENT;
        }

        /**
         * Determine if glyph belongs to (font specific) mark attachment class.
         * @param gid a glyph identifier (index)
         * @param mac a (font specific) mark attachment class
         * @return true if glyph belongs to specified mark attachment class
         */
        public abstract boolean isMarkAttachClass(int gid, int mac);

        /**
         * Determine mark attachment class.
         * @param gid a glyph identifier (index)
         * @return a non-negative mark attachment class, or -1 if no class defined
         */
        public abstract int getMarkAttachClass(int gid);

        static GlyphDefinitionSubtable create(String id, int sequence, int flags, int format, GlyphMappingTable mapping, List entries) {
            if (format == 1) {
                return new MarkAttachmentSubtableFormat1(id, sequence, flags, format, mapping, entries);
            } else {
                // Only format 1 mark attachment subtables are supported.
                throw new UnsupportedOperationException();
            }
        }
    }

    private static class MarkAttachmentSubtableFormat1 extends MarkAttachmentSubtable {
        MarkAttachmentSubtableFormat1(String id, int sequence, int flags, int format, GlyphMappingTable mapping, List entries) {
            super(id, sequence, flags, format, mapping, entries);
        }

        /** {@inheritDoc} */
        public List getEntries() {
            return null;
        }

        /** {@inheritDoc} */
        public boolean isCompatible(GlyphSubtable subtable) {
            return subtable instanceof MarkAttachmentSubtable;
        }

        /** {@inheritDoc} */
        public boolean isMarkAttachClass(int gid, int mac) {
            GlyphClassMapping cm = getClasses();
            if (cm != null) {
                return cm.getClassIndex(gid, 0) == mac;
            } else {
                return false;
            }
        }

        /** {@inheritDoc} */
        public int getMarkAttachClass(int gid) {
            GlyphClassMapping cm = getClasses();
            if (cm != null) {
                return cm.getClassIndex(gid, 0);
            } else {
                return -1;
            }
        }
    }
}
| |
/**********************************************************************************
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006 The Sakai Foundation.
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package uk.ac.cam.caret.sakai.rwiki.component.service.impl;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Writer;
import java.net.URI;
import java.util.Properties;
import javax.servlet.http.HttpServletRequest;
import javax.xml.transform.Source;
import javax.xml.transform.TransformerException;
import javax.xml.transform.URIResolver;
import javax.xml.transform.stream.StreamSource;
import org.apache.avalon.framework.configuration.Configuration;
import org.apache.avalon.framework.configuration.DefaultConfigurationBuilder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.fop.apps.FOPException;
import org.apache.fop.apps.FOUserAgent;
import org.apache.fop.apps.Fop;
import org.apache.fop.apps.FopFactory;
import org.apache.fop.apps.MimeConstants;
import org.apache.xml.serializer.DOMSerializer;
import org.apache.xml.serializer.ToXMLSAXHandler;
import org.apache.xml.serializer.ToSAXHandler;
import org.sakaiproject.component.cover.ServerConfigurationService;
import org.sakaiproject.content.api.ContentResource;
import org.sakaiproject.content.cover.ContentHostingService;
import org.sakaiproject.entity.api.Reference;
import org.sakaiproject.entity.cover.EntityManager;
import org.w3c.dom.Node;
import org.xml.sax.Attributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.Locator;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
public class BaseFOPSerializer extends ToSAXHandler implements ContentHandler
{
	private static final Log logger = LogFactory
			.getLog(BaseFOPSerializer.class);

	/** Classpath location of the Avalon configuration used to set up FOP. */
	private static final String configfile = "/uk/ac/cam/caret/sakai/rwiki/component/service/impl/fop.cfg.xml";

	private Properties outputFormat = null;

	private Writer writer = null;

	private OutputStream outputStream = null;

	/** Lazily created FOP SAX handler; all SAX events are forwarded to it. */
	private ContentHandler contentHandler = null;

	private Fop fop = null;

	private ToXMLSAXHandler sax = null;

	/** Output MIME type rendered by FOP; subclasses may override. */
	protected String mimeType = MimeConstants.MIME_PDF;

	/**
	 * {@inheritDoc}
	 */
	public void setOutputStream(OutputStream arg0)
	{
		this.outputStream = arg0;
	}

	/**
	 * {@inheritDoc}
	 */
	public OutputStream getOutputStream()
	{
		return outputStream;
	}

	/**
	 * {@inheritDoc}
	 */
	public void setWriter(Writer arg0)
	{
		this.writer = arg0;
	}

	/**
	 * {@inheritDoc}
	 */
	public Writer getWriter()
	{
		return writer;
	}

	/**
	 * {@inheritDoc}
	 */
	public void setOutputFormat(Properties arg0)
	{
		this.outputFormat = arg0;
	}

	/**
	 * {@inheritDoc}
	 */
	public Properties getOutputFormat()
	{
		return outputFormat;
	}

	/**
	 * Lazily create the FOP rendering pipeline and return FOP's SAX handler.
	 * The factory is configured from {@link #configfile}; URIs under
	 * {@code /access} are resolved directly through the ContentHostingService,
	 * all others via the base URI (or FOP's own resolver when base is null).
	 *
	 * @return the FOP handler that receives XSL-FO SAX events
	 * @throws IOException if the FOP pipeline cannot be created
	 */
	public ContentHandler asContentHandler() throws IOException
	{
		if (fop == null)
		{
			InputStream stream = null;
			try
			{
				DefaultConfigurationBuilder cfgBuild = new DefaultConfigurationBuilder();
				stream = getClass()
						.getResourceAsStream(configfile);
				Configuration cfg = cfgBuild.build(stream);
				final FopFactory ff = FopFactory.newInstance();
				ff.setUserConfig(cfg);
				FOUserAgent userAgent = ff.newFOUserAgent();
				userAgent.setURIResolver(new URIResolver()
				{
					public Source resolve(String href, String base)
							throws TransformerException
					{
						Source source = null;
						try
						{
							logger.info("Resolving " + href + " from " + base);
							HttpServletRequest request = XSLTEntityHandler
									.getCurrentRequest();
							if (request != null && href.startsWith("/access"))
							{
								// going direct into the ContentHandler Service
								try
								{
									String path = href.substring("/access"
											.length());
									Reference ref = EntityManager
											.newReference(path);
									ContentResource resource = ContentHostingService
											.getResource(ref.getId());
									return new StreamSource(resource
											.streamContent());
								}
								catch (Exception ex)
								{
									// fall back to resolving against the base URI
									URI uri = new URI(base);
									String content = uri.resolve(href)
											.toString();
									source = new StreamSource(content);
								}
							}
							else
							{
								// use default resolver to resolve font
								if (base == null)
								{
									return ff.resolveURI(href, base);
								}
								URI uri = new URI(base);
								String content = uri.resolve(href).toString();
								source = new StreamSource(content);
							}
						}
						catch (Exception ex)
						{
							throw new TransformerException("Failed to get "
									+ href, ex);
						}
						return source;
					}
				});
				userAgent.setBaseURL(ServerConfigurationService
						.getString("serverUrl"));
				fop = ff.newFop(mimeType, userAgent, outputStream);
			}
			catch (Exception e)
			{
				logger.error("Failed to create Handler ", e);
				// chain the cause so the root failure is not lost
				// (initCause rather than the IOException(String, Throwable)
				// constructor, to stay compatible with older JVMs)
				IOException ioe = new IOException("Failed to create " + mimeType
						+ " Serializer: " + e.getMessage());
				ioe.initCause(e);
				throw ioe;
			}
			finally {
				if (stream != null) {
					stream.close();
				}
			}
		}
		DefaultHandler dh;
		try
		{
			dh = fop.getDefaultHandler();
		}
		catch (FOPException e)
		{
			logger.error("Failed to get FOP Handler ", e);
			throw new RuntimeException("Failed to get FOP Handler ", e);
		}
		return dh;
	}

	/**
	 * {@inheritDoc}
	 *
	 * @return always null; DOM serialization is not supported here
	 */
	public DOMSerializer asDOMSerializer()
	{
		return null;
	}

	/**
	 * Reset all state so the serializer can be reused for a new render.
	 *
	 * @return always false
	 */
	public boolean reset()
	{
		fop = null;
		outputFormat = null;
		writer = null;
		outputStream = null;
		sax = null;
		return false;
	}

	/* (non-Javadoc)
	 * @see org.xml.sax.ContentHandler#characters(char[], int, int)
	 */
	public void characters(char[] ch, int start, int length) throws SAXException
	{
		initContentHandler();
		contentHandler.characters(ch, start, length);
	}

	/**
	 * Ensure the delegate content handler and the XML SAX handler exist,
	 * creating them on first use.
	 *
	 * @throws SAXException if the FOP pipeline cannot be created
	 */
	private void initContentHandler() throws SAXException
	{
		if ( contentHandler == null ) {
			try
			{
				contentHandler = asContentHandler();
			}
			catch (IOException e)
			{
				throw new SAXException(e);
			}
		}
		if (sax == null) {
			sax = new ToXMLSAXHandler(contentHandler, "UTF-8");
		}
	}

	/* (non-Javadoc)
	 * @see org.xml.sax.ContentHandler#endDocument()
	 */
	public void endDocument() throws SAXException
	{
		initContentHandler();
		contentHandler.endDocument();
	}

	/* (non-Javadoc)
	 * @see org.xml.sax.ContentHandler#endElement(java.lang.String, java.lang.String, java.lang.String)
	 */
	public void endElement(String uri, String localName, String qName) throws SAXException
	{
		initContentHandler();
		contentHandler.endElement(uri, localName, qName);
	}

	/* (non-Javadoc)
	 * @see org.xml.sax.ContentHandler#endPrefixMapping(java.lang.String)
	 */
	public void endPrefixMapping(String prefix) throws SAXException
	{
		initContentHandler();
		contentHandler.endPrefixMapping(prefix);
	}

	/* (non-Javadoc)
	 * @see org.xml.sax.ContentHandler#ignorableWhitespace(char[], int, int)
	 */
	public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException
	{
		initContentHandler();
		contentHandler.ignorableWhitespace(ch, start, length);
	}

	/* (non-Javadoc)
	 * @see org.xml.sax.ContentHandler#processingInstruction(java.lang.String, java.lang.String)
	 */
	public void processingInstruction(String target, String data) throws SAXException
	{
		initContentHandler();
		contentHandler.processingInstruction(target, data);
	}

	/* (non-Javadoc)
	 * @see org.xml.sax.ContentHandler#setDocumentLocator(org.xml.sax.Locator)
	 */
	public void setDocumentLocator(Locator locator)
	{
		try
		{
			initContentHandler();
		}
		catch (SAXException e)
		{
			logger.error(e);
			// initialization failed, so there is no handler to delegate to;
			// returning avoids a NullPointerException on contentHandler
			return;
		}
		contentHandler.setDocumentLocator(locator);
	}

	/* (non-Javadoc)
	 * @see org.xml.sax.ContentHandler#skippedEntity(java.lang.String)
	 */
	public void skippedEntity(String name) throws SAXException
	{
		initContentHandler();
		contentHandler.skippedEntity(name);
	}

	/* (non-Javadoc)
	 * @see org.xml.sax.ContentHandler#startDocument()
	 */
	public void startDocument() throws SAXException
	{
		initContentHandler();
		contentHandler.startDocument();
	}

	/* (non-Javadoc)
	 * @see org.xml.sax.ContentHandler#startElement(java.lang.String, java.lang.String, java.lang.String, org.xml.sax.Attributes)
	 */
	public void startElement(String uri, String localName, String qName, Attributes atts) throws SAXException
	{
		initContentHandler();
		contentHandler.startElement(uri, localName, qName, atts);
	}

	/* (non-Javadoc)
	 * @see org.xml.sax.ContentHandler#startPrefixMapping(java.lang.String, java.lang.String)
	 */
	public void startPrefixMapping(String prefix, String uri) throws SAXException
	{
		initContentHandler();
		contentHandler.startPrefixMapping(prefix, uri);
	}

	public boolean setEscaping(boolean escape) throws SAXException
	{
		initContentHandler();
		return sax.setEscaping(escape);
	}

	@Override
	public void attributeDecl(String arg0, String arg1, String arg2,
			String arg3, String arg4) throws SAXException {
		initContentHandler();
		sax.attributeDecl(arg0, arg1, arg2, arg3, arg4);
	}

	@Override
	public void comment(char[] arg0, int arg1, int arg2) throws SAXException {
		initContentHandler();
		sax.comment(arg0, arg1, arg2);
	}

	@Override
	public void elementDecl(String arg0, String arg1) throws SAXException {
		initContentHandler();
		sax.elementDecl(arg0, arg1);
	}

	@Override
	public void endCDATA() throws SAXException {
		initContentHandler();
		sax.endCDATA();
	}

	@Override
	public void endDTD() throws SAXException {
		initContentHandler();
		sax.endDTD();
	}

	@Override
	public void endElement(String arg0) throws SAXException {
		initContentHandler();
		sax.endElement(arg0);
	}

	@Override
	public void externalEntityDecl(String arg0, String arg1, String arg2)
			throws SAXException {
		initContentHandler();
		sax.externalEntityDecl(arg0, arg1, arg2);
	}

	@Override
	public void internalEntityDecl(String arg0, String arg1)
			throws SAXException {
		initContentHandler();
		sax.internalEntityDecl(arg0, arg1);
	}

	@Override
	public void serialize(Node arg0) throws IOException {
		try {
			initContentHandler();
		} catch (SAXException e) {
			logger.error(e);
			// propagate instead of falling through to a NullPointerException
			// on the uninitialized sax handler
			IOException ioe = new IOException(
					"Failed to initialize FOP content handler");
			ioe.initCause(e);
			throw ioe;
		}
		sax.serialize(arg0);
	}

	@Override
	public void startCDATA() throws SAXException {
		initContentHandler();
		sax.startCDATA();
	}

	@Override
	public void startEntity(String arg0) throws SAXException {
		initContentHandler();
		sax.startEntity(arg0);
	}

	@Override
	public boolean startPrefixMapping(String arg0, String arg1, boolean arg2)
			throws SAXException {
		initContentHandler();
		return sax.startPrefixMapping(arg0, arg1, arg2);
	}
}
| |
/******************************************************************************
* Compilation: javac StdIn.java
* Execution: java StdIn (interactive test of basic functionality)
* Dependencies: none
*
* Reads in data of various types from standard input.
*
******************************************************************************/
package edu.princeton.cs.algs4;
import java.util.ArrayList;
import java.util.InputMismatchException;
import java.util.Locale;
import java.util.NoSuchElementException;
import java.util.Scanner;
import java.util.regex.Pattern;
/**
* The {@code StdIn} class provides static methods for reading strings
* and numbers from standard input.
* These functions fall into one of four categories:
* <ul>
* <li>those for reading individual tokens from standard input, one at a time,
* and converting each to a number, string, or boolean
* <li>those for reading characters from standard input, one at a time
* <li>those for reading lines from standard input, one at a time
* <li>those for reading a sequence of values of the same type from standard input,
* and returning the values in an array
* </ul>
* <p>
* Generally, it is best not to mix functions from the different
* categories in the same program.
* <p>
* <b>Reading tokens from standard input and converting to numbers and strings.</b>
* You can use the following methods to read numbers, strings, and booleans
* from standard input one at a time:
* <ul>
* <li> {@link #isEmpty()}
* <li> {@link #readInt()}
* <li> {@link #readDouble()}
* <li> {@link #readString()}
* <li> {@link #readShort()}
* <li> {@link #readLong()}
* <li> {@link #readFloat()}
* <li> {@link #readByte()}
* <li> {@link #readBoolean()}
* </ul>
* <p>
* The first method returns true if standard input has more tokens.
* Each other method skips over any input that is whitespace. Then, it reads
* the next token and attempts to convert it into a value of the specified
* type. If it succeeds, it returns that value; otherwise, it
* throws an {@link InputMismatchException}.
* <p>
* <em>Whitespace</em> includes spaces, tabs, and newlines; the full definition
* is inherited from {@link Character#isWhitespace(char)}.
* A <em>token</em> is a maximal sequence of non-whitespace characters.
* The precise rules for describing which tokens can be converted to
* integers and floating-point numbers are inherited from
* <a href = "http://docs.oracle.com/javase/7/docs/api/java/util/Scanner.html#number-syntax">Scanner</a>,
* using the locale {@link Locale#US}; the rules
* for floating-point numbers are slightly different
* from those in {@link Double#valueOf(String)},
* but unlikely to be of concern to most programmers.
* <p>
 * As an example, the following code fragment reads numbers from standard input,
 * one at a time, and prints each one on a separate line.
 * <pre>
 * while (!StdIn.isEmpty()) {
 *     double value = StdIn.readDouble();
 *     StdOut.println(value);
 * }
 * </pre>
* <p>
* <b>Reading characters from standard input.</b>
* You can use the following two methods to read characters from standard input one at a time:
* <ul>
* <li> {@link #hasNextChar()}
* <li> {@link #readChar()}
* </ul>
* <p>
* The first method returns true if standard input has more input (including whitespace).
* The second method reads and returns the next character of input on standard
* input (possibly a whitespace character).
* <p>
* As an example, the following code fragment reads characters from standard input,
* one character at a time, and prints it to standard output.
* <pre>
* while (StdIn.hasNextChar()) {
* char c = StdIn.readChar();
* StdOut.print(c);
* }
* </pre>
* <p>
* <b>Reading lines from standard input.</b>
* You can use the following two methods to read lines from standard input:
* <ul>
* <li> {@link #hasNextLine()}
* <li> {@link #readLine()}
* </ul>
* <p>
* The first method returns true if standard input has more input (including whitespace).
* The second method reads and returns the remaining portion of
* the next line of input on standard input (possibly whitespace),
* discarding the trailing line separator.
* <p>
* A <em>line separator</em> is defined to be one of the following strings:
* {@code \n} (Linux), {@code \r} (old Macintosh),
* {@code \r\n} (Windows),
* {@code \}{@code u2028}, {@code \}{@code u2029}, or {@code \}{@code u0085}.
* <p>
* As an example, the following code fragment reads text from standard input,
* one line at a time, and prints it to standard output.
* <pre>
* while (StdIn.hasNextLine()) {
* String line = StdIn.readLine();
* StdOut.println(line);
* }
* </pre>
* <p>
* <b>Reading a sequence of values of the same type from standard input.</b>
* You can use the following methods to read a sequence numbers, strings,
* or booleans (all of the same type) from standard input:
* <ul>
* <li> {@link #readAllDoubles()}
* <li> {@link #readAllInts()}
* <li> {@link #readAllLongs()}
* <li> {@link #readAllStrings()}
* <li> {@link #readAllLines()}
* <li> {@link #readAll()}
* </ul>
* <p>
 * The first three methods read all of the remaining tokens on standard input
 * and convert them to values of
* the specified type, as in the corresponding
* {@code readDouble}, {@code readInt}, and {@code readString()} methods.
* The {@code readAllLines()} method reads all remaining lines on standard
* input and returns them as an array of strings.
* The {@code readAll()} method reads all remaining input on standard
* input and returns it as a string.
* <p>
* As an example, the following code fragment reads all of the remaining
* tokens from standard input and returns them as an array of strings.
* <pre>
* String[] words = StdIn.readAllStrings();
* </pre>
* <p>
* <b>Differences with Scanner.</b>
* {@code StdIn} and {@link Scanner} are both designed to parse
* tokens and convert them to primitive types and strings.
* The main differences are summarized below:
* <ul>
 * <li> {@code StdIn} is a set of static methods and reads
 * input from only standard input. It is suitable for use before
* a programmer knows about objects.
* See {@link In} for an object-oriented version that handles
* input from files, URLs,
* and sockets.
* <li> {@code StdIn} uses whitespace as the delimiter pattern
* that separates tokens.
* {@link Scanner} supports arbitrary delimiter patterns.
* <li> {@code StdIn} coerces the character-set encoding to UTF-8,
* which is the most widely used character encoding for Unicode.
* <li> {@code StdIn} coerces the locale to {@link Locale#US},
* for consistency with {@link StdOut}, {@link Double#parseDouble(String)},
* and floating-point literals.
* <li> {@code StdIn} has convenient methods for reading a single
* character; reading in sequences of integers, doubles, or strings;
* and reading in all of the remaining input.
* </ul>
* <p>
* Historical note: {@code StdIn} preceded {@code Scanner}; when
* {@code Scanner} was introduced, this class was re-implemented to use {@code Scanner}.
* <p>
* <b>Using standard input.</b>
 * Standard input is a fundamental operating system abstraction on Mac OS X,
* Windows, and Linux.
* The methods in {@code StdIn} are <em>blocking</em>, which means that they
* will wait until you enter input on standard input.
* If your program has a loop that repeats until standard input is empty,
* you must signal that the input is finished.
* To do so, depending on your operating system and IDE,
* use either {@code <Ctrl-d>} or {@code <Ctrl-z>}, on its own line.
* If you are redirecting standard input from a file, you will not need
* to do anything to signal that the input is finished.
* <p>
* <b>Known bugs.</b>
* Java's UTF-8 encoding does not recognize the optional
 * <a href = "http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4508058">byte-order mark</a>.
 * If the input begins with the optional byte-order mark, {@code StdIn}
* will have an extra character {@code \}{@code uFEFF} at the beginning.
* <p>
* <b>Reference.</b>
* For additional documentation,
* see <a href="https://introcs.cs.princeton.edu/15inout">Section 1.5</a> of
* <em>Computer Science: An Interdisciplinary Approach</em>
* by Robert Sedgewick and Kevin Wayne.
*
* @author Robert Sedgewick
* @author Kevin Wayne
* @author David Pritchard
*/
public final class StdIn {
/*** begin: section (1 of 2) of code duplicated from In to StdIn. */
// assume Unicode UTF-8 encoding
private static final String CHARSET_NAME = "UTF-8";
// assume language = English, country = US for consistency with System.out.
private static final Locale LOCALE = Locale.US;
// the default token separator; we maintain the invariant that this value
// is held by the scanner's delimiter between calls
private static final Pattern WHITESPACE_PATTERN = Pattern.compile("\\p{javaWhitespace}+");
// makes whitespace significant
private static final Pattern EMPTY_PATTERN = Pattern.compile("");
// used to read the entire input
private static final Pattern EVERYTHING_PATTERN = Pattern.compile("\\A");
/*** end: section (1 of 2) of code duplicated from In to StdIn. */
private static Scanner scanner;
// it doesn't make sense to instantiate this class
private StdIn() { }
//// begin: section (2 of 2) of code duplicated from In to StdIn,
//// with all methods changed from "public" to "public static"
/**
* Returns true if standard input is empty (except possibly for whitespace).
* Use this method to know whether the next call to {@link #readString()},
* {@link #readDouble()}, etc will succeed.
*
* @return {@code true} if standard input is empty (except possibly
* for whitespace); {@code false} otherwise
*/
public static boolean isEmpty() {
return !scanner.hasNext();
}
/**
* Returns true if standard input has a next line.
* Use this method to know whether the
* next call to {@link #readLine()} will succeed.
* This method is functionally equivalent to {@link #hasNextChar()}.
*
* @return {@code true} if standard input has more input (including whitespace);
* {@code false} otherwise
*/
public static boolean hasNextLine() {
return scanner.hasNextLine();
}
/**
* Returns true if standard input has more input (including whitespace).
* Use this method to know whether the next call to {@link #readChar()} will succeed.
* This method is functionally equivalent to {@link #hasNextLine()}.
*
* @return {@code true} if standard input has more input (including whitespace);
* {@code false} otherwise
*/
public static boolean hasNextChar() {
scanner.useDelimiter(EMPTY_PATTERN);
boolean result = scanner.hasNext();
scanner.useDelimiter(WHITESPACE_PATTERN);
return result;
}
/**
* Reads and returns the next line, excluding the line separator if present.
*
* @return the next line, excluding the line separator if present;
* {@code null} if no such line
*/
public static String readLine() {
String line;
try {
line = scanner.nextLine();
}
catch (NoSuchElementException e) {
line = null;
}
return line;
}
/**
* Reads and returns the next character.
*
* @return the next {@code char}
* @throws NoSuchElementException if standard input is empty
*/
public static char readChar() {
try {
scanner.useDelimiter(EMPTY_PATTERN);
String ch = scanner.next();
assert ch.length() == 1 : "Internal (Std)In.readChar() error!"
+ " Please contact the authors.";
scanner.useDelimiter(WHITESPACE_PATTERN);
return ch.charAt(0);
}
catch (NoSuchElementException e) {
throw new NoSuchElementException("attempts to read a 'char' value from standard input, but there are no more tokens available");
}
}
/**
* Reads and returns the remainder of the input, as a string.
*
* @return the remainder of the input, as a string
* @throws NoSuchElementException if standard input is empty
*/
public static String readAll() {
if (!scanner.hasNextLine())
return "";
String result = scanner.useDelimiter(EVERYTHING_PATTERN).next();
// not that important to reset delimeter, since now scanner is empty
scanner.useDelimiter(WHITESPACE_PATTERN); // but let's do it anyway
return result;
}
/**
* Reads the next token and returns the {@code String}.
*
* @return the next {@code String}
* @throws NoSuchElementException if standard input is empty
*/
public static String readString() {
try {
return scanner.next();
}
catch (NoSuchElementException e) {
throw new NoSuchElementException("attempts to read a 'String' value from standard input, but there are no more tokens available");
}
}
/**
* Reads the next token from standard input, parses it as an integer, and returns the integer.
*
* @return the next integer on standard input
* @throws NoSuchElementException if standard input is empty
* @throws InputMismatchException if the next token cannot be parsed as an {@code int}
*/
public static int readInt() {
try {
return scanner.nextInt();
}
catch (InputMismatchException e) {
String token = scanner.next();
throw new InputMismatchException("attempts to read an 'int' value from standard input, but the next token is \"" + token + "\"");
}
catch (NoSuchElementException e) {
throw new NoSuchElementException("attemps to read an 'int' value from standard input, but there are no more tokens available");
}
}
/**
* Reads the next token from standard input, parses it as a double, and returns the double.
*
* @return the next double on standard input
* @throws NoSuchElementException if standard input is empty
* @throws InputMismatchException if the next token cannot be parsed as a {@code double}
*/
public static double readDouble() {
try {
return scanner.nextDouble();
}
catch (InputMismatchException e) {
String token = scanner.next();
throw new InputMismatchException("attempts to read a 'double' value from standard input, but the next token is \"" + token + "\"");
}
catch (NoSuchElementException e) {
throw new NoSuchElementException("attempts to read a 'double' value from standard input, but there are no more tokens available");
}
}
/**
* Reads the next token from standard input, parses it as a float, and returns the float.
*
* @return the next float on standard input
* @throws NoSuchElementException if standard input is empty
* @throws InputMismatchException if the next token cannot be parsed as a {@code float}
*/
public static float readFloat() {
try {
return scanner.nextFloat();
}
catch (InputMismatchException e) {
String token = scanner.next();
throw new InputMismatchException("attempts to read a 'float' value from standard input, but the next token is \"" + token + "\"");
}
catch (NoSuchElementException e) {
throw new NoSuchElementException("attempts to read a 'float' value from standard input, but there are no more tokens available");
}
}
/**
* Reads the next token from standard input, parses it as a long integer, and returns the long integer.
*
* @return the next long integer on standard input
* @throws NoSuchElementException if standard input is empty
* @throws InputMismatchException if the next token cannot be parsed as a {@code long}
*/
public static long readLong() {
try {
return scanner.nextLong();
}
catch (InputMismatchException e) {
String token = scanner.next();
throw new InputMismatchException("attempts to read a 'long' value from standard input, but the next token is \"" + token + "\"");
}
catch (NoSuchElementException e) {
throw new NoSuchElementException("attempts to read a 'long' value from standard input, but there are no more tokens available");
}
}
/**
* Reads the next token from standard input, parses it as a short integer, and returns the short integer.
*
* @return the next short integer on standard input
* @throws NoSuchElementException if standard input is empty
* @throws InputMismatchException if the next token cannot be parsed as a {@code short}
*/
public static short readShort() {
try {
return scanner.nextShort();
}
catch (InputMismatchException e) {
String token = scanner.next();
throw new InputMismatchException("attempts to read a 'short' value from standard input, but the next token is \"" + token + "\"");
}
catch (NoSuchElementException e) {
throw new NoSuchElementException("attempts to read a 'short' value from standard input, but there are no more tokens available");
}
}
/**
* Reads the next token from standard input, parses it as a byte, and returns the byte.
*
* @return the next byte on standard input
* @throws NoSuchElementException if standard input is empty
* @throws InputMismatchException if the next token cannot be parsed as a {@code byte}
*/
public static byte readByte() {
try {
return scanner.nextByte();
}
catch (InputMismatchException e) {
String token = scanner.next();
throw new InputMismatchException("attempts to read a 'byte' value from standard input, but the next token is \"" + token + "\"");
}
catch (NoSuchElementException e) {
throw new NoSuchElementException("attempts to read a 'byte' value from standard input, but there are no more tokens available");
}
}
/**
* Reads the next token from standard input, parses it as a boolean,
* and returns the boolean.
*
* @return the next boolean on standard input
* @throws NoSuchElementException if standard input is empty
* @throws InputMismatchException if the next token cannot be parsed as a {@code boolean}:
* {@code true} or {@code 1} for true, and {@code false} or {@code 0} for false,
* ignoring case
*/
public static boolean readBoolean() {
try {
String token = readString();
if ("true".equalsIgnoreCase(token)) return true;
if ("false".equalsIgnoreCase(token)) return false;
if ("1".equals(token)) return true;
if ("0".equals(token)) return false;
throw new InputMismatchException("attempts to read a 'boolean' value from standard input, but the next token is \"" + token + "\"");
}
catch (NoSuchElementException e) {
throw new NoSuchElementException("attempts to read a 'boolean' value from standard input, but there are no more tokens available");
}
}
/**
* Reads all remaining tokens from standard input and returns them as an array of strings.
*
* @return all remaining tokens on standard input, as an array of strings
*/
public static String[] readAllStrings() {
// we could use readAll.trim().split(), but that's not consistent
// because trim() uses characters 0x00..0x20 as whitespace
String[] tokens = WHITESPACE_PATTERN.split(readAll());
if (tokens.length == 0 || tokens[0].length() > 0)
return tokens;
// don't include first token if it is leading whitespace
String[] decapitokens = new String[tokens.length-1];
for (int i = 0; i < tokens.length - 1; i++)
decapitokens[i] = tokens[i+1];
return decapitokens;
}
/**
* Reads all remaining lines from standard input and returns them as an array of strings.
* @return all remaining lines on standard input, as an array of strings
*/
public static String[] readAllLines() {
ArrayList<String> lines = new ArrayList<String>();
while (hasNextLine()) {
lines.add(readLine());
}
return lines.toArray(new String[lines.size()]);
}
/**
* Reads all remaining tokens from standard input, parses them as integers, and returns
* them as an array of integers.
* @return all remaining integers on standard input, as an array
* @throws InputMismatchException if any token cannot be parsed as an {@code int}
*/
public static int[] readAllInts() {
String[] fields = readAllStrings();
int[] vals = new int[fields.length];
for (int i = 0; i < fields.length; i++)
vals[i] = Integer.parseInt(fields[i]);
return vals;
}
/**
* Reads all remaining tokens from standard input, parses them as longs, and returns
* them as an array of longs.
* @return all remaining longs on standard input, as an array
* @throws InputMismatchException if any token cannot be parsed as a {@code long}
*/
public static long[] readAllLongs() {
String[] fields = readAllStrings();
long[] vals = new long[fields.length];
for (int i = 0; i < fields.length; i++)
vals[i] = Long.parseLong(fields[i]);
return vals;
}
/**
* Reads all remaining tokens from standard input, parses them as doubles, and returns
* them as an array of doubles.
* @return all remaining doubles on standard input, as an array
* @throws InputMismatchException if any token cannot be parsed as a {@code double}
*/
public static double[] readAllDoubles() {
String[] fields = readAllStrings();
double[] vals = new double[fields.length];
for (int i = 0; i < fields.length; i++)
vals[i] = Double.parseDouble(fields[i]);
return vals;
}
//// end: section (2 of 2) of code duplicated from In to StdIn
// do this once when StdIn is initialized
static {
resync();
}
/**
* If StdIn changes, use this to reinitialize the scanner.
*/
private static void resync() {
setScanner(new Scanner(new java.io.BufferedInputStream(System.in), CHARSET_NAME));
}
private static void setScanner(Scanner scanner) {
StdIn.scanner = scanner;
StdIn.scanner.useLocale(LOCALE);
}
/**
* Reads all remaining tokens, parses them as integers, and returns
* them as an array of integers.
* @return all remaining integers, as an array
* @throws InputMismatchException if any token cannot be parsed as an {@code int}
* @deprecated Replaced by {@link #readAllInts()}.
*/
@Deprecated
public static int[] readInts() {
return readAllInts();
}
/**
* Reads all remaining tokens, parses them as doubles, and returns
* them as an array of doubles.
* @return all remaining doubles, as an array
* @throws InputMismatchException if any token cannot be parsed as a {@code double}
* @deprecated Replaced by {@link #readAllDoubles()}.
*/
@Deprecated
public static double[] readDoubles() {
return readAllDoubles();
}
/**
 * Reads all remaining tokens and returns them as an array of strings.
 * @return all remaining tokens, as an array of strings
 * @deprecated Replaced by {@link #readAllStrings()}.
 */
@Deprecated
public static String[] readStrings() {
    // Kept for backward compatibility; simply delegates to the replacement.
    return readAllStrings();
}
/**
 * Interactive test of basic functionality: prompts for one value of each
 * primitive kind and echoes it back.
 *
 * @param args the command-line arguments
 */
public static void main(String[] args) {
    StdOut.print("Type a string: ");
    String stringValue = StdIn.readString();
    StdOut.println("Your string was: " + stringValue);
    StdOut.println();

    StdOut.print("Type an int: ");
    int intValue = StdIn.readInt();
    StdOut.println("Your int was: " + intValue);
    StdOut.println();

    StdOut.print("Type a boolean: ");
    boolean booleanValue = StdIn.readBoolean();
    StdOut.println("Your boolean was: " + booleanValue);
    StdOut.println();

    StdOut.print("Type a double: ");
    double doubleValue = StdIn.readDouble();
    StdOut.println("Your double was: " + doubleValue);
    StdOut.println();
}
}
/******************************************************************************
* Copyright 2002-2016, Robert Sedgewick and Kevin Wayne.
*
* This file is part of algs4.jar, which accompanies the textbook
*
* Algorithms, 4th edition by Robert Sedgewick and Kevin Wayne,
* Addison-Wesley Professional, 2011, ISBN 0-321-57351-X.
* http://algs4.cs.princeton.edu
*
*
* algs4.jar is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* algs4.jar is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with algs4.jar. If not, see http://www.gnu.org/licenses.
******************************************************************************/
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring.copy;
import com.intellij.ide.util.DirectoryUtil;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
import com.intellij.openapi.help.HelpManager;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.DialogWrapperPeer;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.TextComponentAccessor;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.ui.DocumentAdapter;
import com.intellij.ui.RecentsManager;
import com.intellij.ui.TextFieldWithHistoryWithBrowseButton;
import com.intellij.ui.components.JBLabelDecorator;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.PathUtil;
import com.intellij.util.ui.FormBuilder;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import java.awt.*;
import java.io.File;
import java.util.List;
/**
 * Dialog shown for the Copy / Clone file refactorings. Collects an optional
 * new name (when exactly one element is copied) and a target directory
 * (when copying, not cloning), and optionally opens the copy in the editor.
 */
public class CopyFilesOrDirectoriesDialog extends DialogWrapper {
    public static final int MAX_PATH_LENGTH = 70;

    // PropertiesComponent key remembering the "open copy in editor" checkbox state.
    private static final String COPY_OPEN_IN_EDITOR = "Copy.OpenInEditor";
    // RecentsManager key under which previously used target directories are stored.
    private static final String RECENT_KEYS = "CopyFile.RECENT_KEYS";

    /** Ellipsizes the file's presentable URL to at most {@link #MAX_PATH_LENGTH} characters for display. */
    public static String shortenPath(VirtualFile file) {
        return StringUtil.shortenPathWithEllipsis(file.getPresentableUrl(), MAX_PATH_LENGTH);
    }

    /** Creates the "Open copy in editor" checkbox, restoring its last persisted state (defaults to selected). */
    public static JCheckBox createOpenInEditorCB() {
        JCheckBox checkBox = new JCheckBox("Open copy in editor", PropertiesComponent.getInstance().getBoolean(COPY_OPEN_IN_EDITOR, true));
        checkBox.setMnemonic('o');
        return checkBox;
    }

    /** Persists the "open copy in editor" choice for future invocations of the dialog. */
    public static void saveOpenInEditorState(boolean selected) {
        PropertiesComponent.getInstance().setValue(COPY_OPEN_IN_EDITOR, String.valueOf(selected));
    }

    private JLabel myInformationLabel;                                   // headline describing what is being copied/cloned
    private TextFieldWithHistoryWithBrowseButton myTargetDirectoryField; // target directory chooser; created only when copying
    private JCheckBox myOpenFilesInEditor = createOpenInEditorCB();
    private JTextField myNewNameField;                                   // new-name input; created only for a single source element
    private final Project myProject;
    private final boolean myShowDirectoryField;                          // false when cloning (clone stays in place)
    private final boolean myShowNewNameField;                            // true only when exactly one element is processed
    private PsiDirectory myTargetDirectory;                              // resolved/created when OK is pressed
    private boolean myFileCopy = false;                                  // true when the single source element is a file

    /**
     * @param elements               files/directories being copied or cloned
     * @param defaultTargetDirectory pre-filled target directory; may be null
     * @param doClone                true for Clone (single element, no target directory field)
     */
    public CopyFilesOrDirectoriesDialog(PsiElement[] elements, PsiDirectory defaultTargetDirectory, Project project, boolean doClone) {
        super(project, true);
        myProject = project;
        myShowDirectoryField = !doClone;
        myShowNewNameField = elements.length == 1;

        if (doClone && elements.length != 1) {
            throw new IllegalArgumentException("wrong number of elements to clone: " + elements.length);
        }

        setTitle(RefactoringBundle.message(doClone ? "copy.files.clone.title" : "copy.files.copy.title"));
        // init() builds the UI (createNorthPanel etc.); fields used below exist only after it runs.
        init();

        if (elements.length == 1) {
            String text;
            if (elements[0] instanceof PsiFile) {
                PsiFile file = (PsiFile)elements[0];
                String url = shortenPath(file.getVirtualFile());
                text = RefactoringBundle.message(doClone ? "copy.files.clone.file.0" : "copy.files.copy.file.0", url);
                final String fileName = file.getName();
                myNewNameField.setText(fileName);
                // Pre-select the base name (without extension) so typing replaces just the name part.
                final int dotIdx = fileName.lastIndexOf(".");
                if (dotIdx > -1) {
                    myNewNameField.select(0, dotIdx);
                    myNewNameField.putClientProperty(DialogWrapperPeer.HAVE_INITIAL_SELECTION, true);
                }
                myFileCopy = true;
            }
            else {
                PsiDirectory directory = (PsiDirectory)elements[0];
                String url = shortenPath(directory.getVirtualFile());
                text = RefactoringBundle.message(doClone ? "copy.files.clone.directory.0" : "copy.files.copy.directory.0", url);
                myNewNameField.setText(directory.getName());
            }
            myInformationLabel.setText(text);
        }
        else {
            setMultipleElementCopyLabel(elements);
        }

        // Hide the "open in editor" option when every element is binary (nothing sensible to open).
        boolean allBinary = true;
        for (PsiElement element : elements) {
            if (!(element.getContainingFile() instanceof PsiBinaryFile)) {
                allBinary = false;
                break;
            }
        }
        if (allBinary) {
            myOpenFilesInEditor.setVisible(false);
        }

        if (myShowDirectoryField) {
            String targetPath = defaultTargetDirectory == null ? "" : defaultTargetDirectory.getVirtualFile().getPresentableUrl();
            myTargetDirectoryField.getChildComponent().setText(targetPath);
        }
        validateOKButton();
    }

    /** Picks the headline for a multi-element copy: all files, all directories, or a mix. */
    private void setMultipleElementCopyLabel(PsiElement[] elements) {
        boolean allFiles = true;
        boolean allDirectories = true;
        for (PsiElement element : elements) {
            if (element instanceof PsiDirectory) {
                allFiles = false;
            }
            else {
                allDirectories = false;
            }
        }
        if (allFiles) {
            myInformationLabel.setText(RefactoringBundle.message("copy.files.copy.specified.files.label"));
        }
        else if (allDirectories) {
            myInformationLabel.setText(RefactoringBundle.message("copy.files.copy.specified.directories.label"));
        }
        else {
            myInformationLabel.setText(RefactoringBundle.message("copy.files.copy.specified.mixed.label"));
        }
    }

    @Override
    @NotNull
    protected Action[] createActions() {
        return new Action[]{getOKAction(), getCancelAction(), getHelpAction()};
    }

    @Override
    public JComponent getPreferredFocusedComponent() {
        // Focus the name field when present, otherwise the directory field.
        return myShowNewNameField ? myNewNameField : myTargetDirectoryField.getChildComponent();
    }

    @Override
    protected JComponent createCenterPanel() {
        return new JPanel(new BorderLayout());
    }

    /** Builds the form: info label, optional name field, optional directory field, and the open-in-editor checkbox. */
    @Override
    protected JComponent createNorthPanel() {
        myInformationLabel = JBLabelDecorator.createJBLabelDecorator().setBold(true);
        final FormBuilder formBuilder = FormBuilder.createFormBuilder().addComponent(myInformationLabel).addVerticalGap(
            UIUtil.LARGE_VGAP - UIUtil.DEFAULT_VGAP);
        // Re-validate the OK button whenever either text field changes.
        DocumentListener documentListener = new DocumentAdapter() {
            @Override
            public void textChanged(DocumentEvent event) {
                validateOKButton();
            }
        };

        if (myShowNewNameField) {
            myNewNameField = new JTextField();
            myNewNameField.getDocument().addDocumentListener(documentListener);
            formBuilder.addLabeledComponent(RefactoringBundle.message("copy.files.new.name.label"), myNewNameField);
        }

        if (myShowDirectoryField) {
            myTargetDirectoryField = new TextFieldWithHistoryWithBrowseButton();
            myTargetDirectoryField.setTextFieldPreferredWidth(MAX_PATH_LENGTH);
            final List<String> recentEntries = RecentsManager.getInstance(myProject).getRecentEntries(RECENT_KEYS);
            if (recentEntries != null) {
                myTargetDirectoryField.getChildComponent().setHistory(recentEntries);
            }
            final FileChooserDescriptor descriptor = FileChooserDescriptorFactory.createSingleFolderDescriptor();
            myTargetDirectoryField.addBrowseFolderListener(RefactoringBundle.message("select.target.directory"),
                                                           RefactoringBundle.message("the.file.will.be.copied.to.this.directory"),
                                                           myProject, descriptor,
                                                           TextComponentAccessor.TEXT_FIELD_WITH_HISTORY_WHOLE_TEXT);
            myTargetDirectoryField.getChildComponent().addDocumentListener(new DocumentAdapter() {
                @Override
                protected void textChanged(DocumentEvent e) {
                    validateOKButton();
                }
            });
            formBuilder.addLabeledComponent(RefactoringBundle.message("copy.files.to.directory.label"), myTargetDirectoryField);

            String shortcutText =
                KeymapUtil.getFirstKeyboardShortcutText(ActionManager.getInstance().getAction(IdeActions.ACTION_CODE_COMPLETION));
            formBuilder.addTooltip(RefactoringBundle.message("path.completion.shortcut", shortcutText));
        }

        final JPanel wrapper = new JPanel(new BorderLayout());
        wrapper.add(myOpenFilesInEditor, BorderLayout.EAST);
        formBuilder.addComponent(wrapper);
        return formBuilder.getPanel();
    }

    /** @return the directory resolved/created in {@link #doOKAction()}; null before OK or if creation failed */
    public PsiDirectory getTargetDirectory() {
        return myTargetDirectory;
    }

    /** @return the trimmed new name, or null when no name field is shown (multi-element copy) */
    public String getNewName() {
        return myNewNameField != null ? myNewNameField.getText().trim() : null;
    }

    public boolean openInEditor() {
        return myOpenFilesInEditor.isSelected();
    }

    /**
     * Validates the inputs, persists the checkbox state, creates the target
     * directory (inside a write action) and only then closes the dialog.
     */
    @Override
    protected void doOKAction() {
        if (myShowNewNameField) {
            String newName = getNewName();

            if (newName.length() == 0) {
                Messages.showErrorDialog(myProject, RefactoringBundle.message("no.new.name.specified"), RefactoringBundle.message("error.title"));
                return;
            }

            if (myFileCopy && !PathUtil.isValidFileName(newName)) {
                Messages.showErrorDialog(myNewNameField, "Name is not a valid file name");
                return;
            }
        }

        saveOpenInEditorState(myOpenFilesInEditor.isSelected());
        if (myShowDirectoryField) {
            final String targetDirectoryName = myTargetDirectoryField.getChildComponent().getText();

            if (targetDirectoryName.length() == 0) {
                Messages.showErrorDialog(myProject, RefactoringBundle.message("no.target.directory.specified"),
                                         RefactoringBundle.message("error.title"));
                return;
            }

            RecentsManager.getInstance(myProject).registerRecentEntry(RECENT_KEYS, targetDirectoryName);

            CommandProcessor.getInstance().executeCommand(myProject, new Runnable() {
                @Override
                public void run() {
                    ApplicationManager.getApplication().runWriteAction(new Runnable() {
                        @Override
                        public void run() {
                            try {
                                // mkdirs creates any missing intermediate directories; path uses '/' separators.
                                myTargetDirectory =
                                    DirectoryUtil.mkdirs(PsiManager.getInstance(myProject), targetDirectoryName.replace(File.separatorChar, '/'));
                            }
                            catch (IncorrectOperationException ignored) { } // reported below via the null check
                        }
                    });
                }
            }, RefactoringBundle.message("create.directory"), null);

            if (myTargetDirectory == null) {
                Messages.showErrorDialog(myProject, RefactoringBundle.message("cannot.create.directory"), RefactoringBundle.message("error.title"));
                return;
            }
        }

        super.doOKAction();
    }

    /** Enables OK only when the visible fields hold a non-empty (and, for files, valid) value. */
    private void validateOKButton() {
        if (myShowDirectoryField) {
            if (myTargetDirectoryField.getChildComponent().getText().length() == 0) {
                setOKActionEnabled(false);
                return;
            }
        }
        if (myShowNewNameField) {
            final String newName = getNewName();
            if (newName.length() == 0 || myFileCopy && !PathUtil.isValidFileName(newName)) {
                setOKActionEnabled(false);
                return;
            }
        }
        setOKActionEnabled(true);
    }

    @Override
    protected void doHelpAction() {
        HelpManager.getInstance().invokeHelp("refactoring.copyClass");
    }
}
| |
/*
* ja, a Java-bytecode translator toolkit.
* Copyright (C) 1999- Shigeru Chiba. All Rights Reserved.
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. Alternatively, the contents of this file may be used under
* the terms of the GNU Lesser General Public License Version 2.1 or later,
* or the Apache License Version 2.0.
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*/
package ja.tools.reflect;
import java.util.Iterator;
import ja.*;
import ja.CtMethod.ConstParameter;
import ja.bytecode.ClassFile;
import ja.bytecode.BadBytecode;
import ja.bytecode.MethodInfo;
/**
* The class implementing the behavioral reflection mechanism.
*
* <p>If a class is reflective,
* then all the method invocations on every
* instance of that class are intercepted by the runtime
* metaobject controlling that instance. The methods inherited from the
* super classes are also intercepted except final methods. To intercept
* a final method in a super class, that super class must be also reflective.
*
* <p>To do this, the original class file representing a reflective class:
*
* <ul><pre>
* class Person {
* public int f(int i) { return i + 1; }
* public int value;
* }
* </pre></ul>
*
* <p>is modified so that it represents a class:
*
* <ul><pre>
* class Person implements Metalevel {
* public int _original_f(int i) { return i + 1; }
* public int f(int i) { <i>delegate to the metaobject</i> }
*
* public int value;
* public int _r_value() { <i>read "value"</i> }
* public void _w_value(int v) { <i>write "value"</i> }
*
* public ClassMetaobject _getClass() { <i>return a class metaobject</i> }
* public Metaobject _getMetaobject() { <i>return a metaobject</i> }
* public void _setMetaobject(Metaobject m) { <i>change a metaobject</i> }
* }
* </pre></ul>
*
* @see ja.tools.reflect.ClassMetaobject
* @see ja.tools.reflect.Metaobject
* @see ja.tools.reflect.Loader
* @see ja.tools.reflect.Compiler
*/
public class Reflection implements Translator {

    static final String classobjectField = "_classobject";
    static final String classobjectAccessor = "_getClass";
    static final String metaobjectField = "_metaobject";
    static final String metaobjectGetter = "_getMetaobject";
    static final String metaobjectSetter = "_setMetaobject";
    static final String readPrefix = "_r_";
    static final String writePrefix = "_w_";

    static final String metaobjectClassName = "ja.tools.reflect.Metaobject";
    static final String classMetaobjectClassName
        = "ja.tools.reflect.ClassMetaobject";

    // Template methods copied from the Sample class in start().
    protected CtMethod trapMethod, trapStaticMethod;
    protected CtMethod trapRead, trapWrite;
    protected CtClass[] readParam;

    protected ClassPool classPool;
    protected CodeConverter converter;

    // True for the internally-generated accessor/trap methods that must not
    // themselves be intercepted.
    private boolean isExcluded(String name) {
        return name.startsWith(ClassMetaobject.methodPrefix)
            || name.equals(classobjectAccessor)
            || name.equals(metaobjectSetter)
            || name.equals(metaobjectGetter)
            || name.startsWith(readPrefix)
            || name.startsWith(writePrefix);
    }

    /**
     * Constructs a new <code>Reflection</code> object.
     */
    public Reflection() {
        classPool = null;
        converter = new CodeConverter();
    }

    /**
     * Initializes the object.
     *
     * @throws RuntimeException (with the underlying exception as its cause)
     *         if the Sample template class cannot be loaded or rebuilt.
     */
    public void start(ClassPool pool) throws NotFoundException {
        classPool = pool;
        final String msg
            = "ja.tools.reflect.Sample is not found or broken.";
        try {
            CtClass c = classPool.get("ja.tools.reflect.Sample");
            rebuildClassFile(c.getClassFile());
            trapMethod = c.getDeclaredMethod("trap");
            trapStaticMethod = c.getDeclaredMethod("trapStatic");
            trapRead = c.getDeclaredMethod("trapRead");
            trapWrite = c.getDeclaredMethod("trapWrite");
            readParam
                = new CtClass[] { classPool.get("java.lang.Object") };
        }
        catch (NotFoundException e) {
            // Preserve the original exception as the cause instead of discarding it.
            throw new RuntimeException(msg, e);
        } catch (BadBytecode e) {
            throw new RuntimeException(msg, e);
        }
    }

    /**
     * Inserts hooks for intercepting accesses to the fields declared
     * in reflective classes.
     */
    public void onLoad(ClassPool pool, String classname)
        throws CannotCompileException, NotFoundException
    {
        CtClass clazz = pool.get(classname);
        clazz.instrument(converter);
    }

    /**
     * Produces a reflective class.
     * If the super class is also made reflective, it must be done
     * before the sub class.
     *
     * @param classname         the name of the reflective class
     * @param metaobject        the class name of metaobjects.
     * @param metaclass         the class name of the class metaobject.
     * @return <code>false</code>       if the class is already reflective.
     *
     * @see ja.tools.reflect.Metaobject
     * @see ja.tools.reflect.ClassMetaobject
     */
    public boolean makeReflective(String classname,
                                  String metaobject, String metaclass)
        throws CannotCompileException, NotFoundException
    {
        return makeReflective(classPool.get(classname),
                              classPool.get(metaobject),
                              classPool.get(metaclass));
    }

    /**
     * Produces a reflective class.
     * If the super class is also made reflective, it must be done
     * before the sub class.
     *
     * @param clazz             the reflective class.
     * @param metaobject        the class of metaobjects.
     *                          It must be a subclass of
     *                          <code>Metaobject</code>.
     * @param metaclass         the class of the class metaobject.
     *                          It must be a subclass of
     *                          <code>ClassMetaobject</code>.
     * @return <code>false</code>       if the class is already reflective.
     *
     * @see ja.tools.reflect.Metaobject
     * @see ja.tools.reflect.ClassMetaobject
     */
    public boolean makeReflective(Class clazz,
                                  Class metaobject, Class metaclass)
        throws CannotCompileException, NotFoundException
    {
        return makeReflective(clazz.getName(), metaobject.getName(),
                              metaclass.getName());
    }

    /**
     * Produces a reflective class.  It modifies the given
     * <code>CtClass</code> object and makes it reflective.
     * If the super class is also made reflective, it must be done
     * before the sub class.
     *
     * @param clazz             the reflective class.
     * @param metaobject        the class of metaobjects.
     *                          It must be a subclass of
     *                          <code>Metaobject</code>.
     * @param metaclass         the class of the class metaobject.
     *                          It must be a subclass of
     *                          <code>ClassMetaobject</code>.
     * @return <code>false</code>       if the class is already reflective.
     *
     * @see ja.tools.reflect.Metaobject
     * @see ja.tools.reflect.ClassMetaobject
     */
    public boolean makeReflective(CtClass clazz,
                                  CtClass metaobject, CtClass metaclass)
        throws CannotCompileException, CannotReflectException,
               NotFoundException
    {
        // Interfaces and the metaobject hierarchy itself cannot be reflective.
        if (clazz.isInterface())
            throw new CannotReflectException(
                    "Cannot reflect an interface: " + clazz.getName());

        if (clazz.subclassOf(classPool.get(classMetaobjectClassName)))
            throw new CannotReflectException(
                    "Cannot reflect a subclass of ClassMetaobject: "
                    + clazz.getName());

        if (clazz.subclassOf(classPool.get(metaobjectClassName)))
            throw new CannotReflectException(
                    "Cannot reflect a subclass of Metaobject: "
                    + clazz.getName());

        registerReflectiveClass(clazz);
        return modifyClassfile(clazz, metaobject, metaclass);
    }

    /**
     * Registers a reflective class.  The field accesses to the instances
     * of this class are instrumented.
     */
    private void registerReflectiveClass(CtClass clazz) {
        CtField[] fs = clazz.getDeclaredFields();
        for (int i = 0; i < fs.length; ++i) {
            CtField f = fs[i];
            int mod = f.getModifiers();
            // Only public, non-final fields are redirected through accessors.
            if ((mod & Modifier.PUBLIC) != 0 && (mod & Modifier.FINAL) == 0) {
                String name = f.getName();
                converter.replaceFieldRead(f, clazz, readPrefix + name);
                converter.replaceFieldWrite(f, clazz, writePrefix + name);
            }
        }
    }

    // Rewrites the class file: marks it "Reflective", adds the Metalevel
    // interface and metaobject/class-metaobject fields with their accessors.
    private boolean modifyClassfile(CtClass clazz, CtClass metaobject,
                                    CtClass metaclass)
        throws CannotCompileException, NotFoundException
    {
        if (clazz.getAttribute("Reflective") != null)
            return false;       // this is already reflective.
        else
            clazz.setAttribute("Reflective", new byte[0]);

        CtClass mlevel = classPool.get("ja.tools.reflect.Metalevel");
        boolean addMeta = !clazz.subtypeOf(mlevel);
        if (addMeta)
            clazz.addInterface(mlevel);

        processMethods(clazz, addMeta);
        processFields(clazz);

        CtField f;
        if (addMeta) {
            f = new CtField(classPool.get("ja.tools.reflect.Metaobject"),
                            metaobjectField, clazz);
            f.setModifiers(Modifier.PROTECTED);
            clazz.addField(f, CtField.Initializer.byNewWithParams(metaobject));

            clazz.addMethod(CtNewMethod.getter(metaobjectGetter, f));
            clazz.addMethod(CtNewMethod.setter(metaobjectSetter, f));
        }

        f = new CtField(classPool.get("ja.tools.reflect.ClassMetaobject"),
                        classobjectField, clazz);
        f.setModifiers(Modifier.PRIVATE | Modifier.STATIC);
        clazz.addField(f, CtField.Initializer.byNew(metaclass,
                                        new String[] { clazz.getName() }));

        clazz.addMethod(CtNewMethod.getter(classobjectAccessor, f));
        return true;
    }

    // Wraps every public, non-abstract method so calls go through the metaobject.
    private void processMethods(CtClass clazz, boolean dontSearch)
        throws CannotCompileException, NotFoundException
    {
        CtMethod[] ms = clazz.getMethods();
        for (int i = 0; i < ms.length; ++i) {
            CtMethod m = ms[i];
            int mod = m.getModifiers();
            if (Modifier.isPublic(mod) && !Modifier.isAbstract(mod))
                processMethods0(mod, clazz, m, i, dontSearch);
        }
    }

    // Renames (or delegates to) the original method as _m_<id>_<name> and
    // installs a same-signature wrapper that traps the invocation.
    private void processMethods0(int mod, CtClass clazz,
                        CtMethod m, int identifier, boolean dontSearch)
        throws CannotCompileException, NotFoundException
    {
        CtMethod body;
        String name = m.getName();

        if (isExcluded(name))   // internally-used method inherited
            return;             // from a reflective class.

        CtMethod m2;
        if (m.getDeclaringClass() == clazz) {
            if (Modifier.isNative(mod))
                return;

            m2 = m;
            if (Modifier.isFinal(mod)) {
                // The original becomes the trapped body, so it may not stay final.
                mod &= ~Modifier.FINAL;
                m2.setModifiers(mod);
            }
        }
        else {
            // Inherited method: final ones cannot be overridden here.
            if (Modifier.isFinal(mod))
                return;

            mod &= ~Modifier.NATIVE;
            m2 = CtNewMethod.delegator(findOriginal(m, dontSearch), clazz);
            m2.setModifiers(mod);
            clazz.addMethod(m2);
        }

        m2.setName(ClassMetaobject.methodPrefix + identifier
                   + "_" + name);

        if (Modifier.isStatic(mod))
            body = trapStaticMethod;
        else
            body = trapMethod;

        CtMethod wmethod
            = CtNewMethod.wrapped(m.getReturnType(), name,
                                  m.getParameterTypes(), m.getExceptionTypes(),
                                  body, ConstParameter.integer(identifier),
                                  clazz);
        wmethod.setModifiers(mod);
        clazz.addMethod(wmethod);
    }

    // If the declaring class already renamed the method (reflective super
    // class), returns the renamed original; otherwise returns m itself.
    private CtMethod findOriginal(CtMethod m, boolean dontSearch)
        throws NotFoundException
    {
        if (dontSearch)
            return m;

        String name = m.getName();
        CtMethod[] ms = m.getDeclaringClass().getDeclaredMethods();
        for (int i = 0; i < ms.length; ++i) {
            String orgName = ms[i].getName();
            if (orgName.endsWith(name)
                && orgName.startsWith(ClassMetaobject.methodPrefix)
                && ms[i].getSignature().equals(m.getSignature()))
                return ms[i];
        }

        return m;
    }

    // Adds static _r_<field>/_w_<field> trap methods for every public,
    // non-final declared field.
    private void processFields(CtClass clazz)
        throws CannotCompileException, NotFoundException
    {
        CtField[] fs = clazz.getDeclaredFields();
        for (int i = 0; i < fs.length; ++i) {
            CtField f = fs[i];
            int mod = f.getModifiers();
            if ((mod & Modifier.PUBLIC) != 0 && (mod & Modifier.FINAL) == 0) {
                mod |= Modifier.STATIC;
                String name = f.getName();
                CtClass ftype = f.getType();
                CtMethod wmethod
                    = CtNewMethod.wrapped(ftype, readPrefix + name,
                                          readParam, null, trapRead,
                                          ConstParameter.string(name),
                                          clazz);
                wmethod.setModifiers(mod);
                clazz.addMethod(wmethod);
                CtClass[] writeParam = new CtClass[2];
                writeParam[0] = classPool.get("java.lang.Object");
                writeParam[1] = ftype;
                wmethod = CtNewMethod.wrapped(CtClass.voidType,
                                writePrefix + name,
                                writeParam, null, trapWrite,
                                ConstParameter.string(name), clazz);
                wmethod.setModifiers(mod);
                clazz.addMethod(wmethod);
            }
        }
    }

    /**
     * Regenerates stack-map frames for every method, required when running
     * on Java 6 or later class-file versions.
     */
    public void rebuildClassFile(ClassFile cf) throws BadBytecode {
        if (ClassFile.MAJOR_VERSION < ClassFile.JAVA_6)
            return;

        Iterator methods = cf.getMethods().iterator();
        while (methods.hasNext()) {
            MethodInfo mi = (MethodInfo)methods.next();
            mi.rebuildStackMap(classPool);
        }
    }
}
| |
/**
* Copyright (c) 2015 Source Auditor Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.spdx.rdfparser.model;
import java.util.List;
import java.util.Map;
import org.spdx.rdfparser.IModelContainer;
import org.spdx.rdfparser.InvalidSPDXAnalysisException;
import org.spdx.rdfparser.RdfModelHelper;
import org.spdx.rdfparser.SPDXCreatorInformation;
import org.spdx.rdfparser.SPDXReview;
import org.spdx.rdfparser.SpdxPackageVerificationCode;
import org.spdx.rdfparser.SpdxRdfConstants;
import org.spdx.rdfparser.license.AnyLicenseInfo;
import org.spdx.rdfparser.license.LicenseInfoFactory;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;
/**
* The superclass for all classes the use the Jena RDF model.
*
* There are two different lifecycles for objects that subclass RdfModelObject:
* - If there is an existing model which already contains this object, use the static
* method <code>RdfModelObject.createModelObject(ModelContainer container, Node node)</code>
* where the node contains the property values for the class. The subclass
* implementations should implement the population of the Java properties from the
* model. From that point forward, using standard getters and setters will keep
* the Jena model updated along with the Java properties.
*
* - If creating a new object use the constructor and pass in the initial property
* values or use setters to set the property values. To populate the Jena model,
* invoke the method <code>Resource createResource(IModelContainer modelContainer)</code>.
* This create a new resource in the model and populate the Jena model from the
* Java properties. Once this method has been invoked, all subsequent calls to
* setters will update both the Java properties and the Jena RDF property values.
*
* To implement a new RdfModelObject subclass, the following methods must be implemented:
* - Clone: All concrete classes must implement a clone method which will copy the
* Java values but not copy the model data. The clone method can be used to duplicate
* an RdfModelObject in a different Jena model.
* - getType: Return the RDF Resource that describes RDF class associated with the Java class
* - getUri: Returns a URI string for RDF resoures where an absolute URI is required. If null, an anonymous node is created.
* - populateModel: Populates the RDF model from the Java properties
* - equivalent: Returns true if the parameter has the same property values
* - A constructor of the form O(Type1 p1, Type2 p2, ...) where p1, p2, ... are Java properties to initialize the Java object.
* - A constructor of the form O(ModelContainer modelContainer, Node node)
*
* This class implements several common and helper methods including
* methods to find and set resources to the model. The methods to set a resource
* are named <code>setPropertyValue</code> while the methods to find a
* resource value is named <code>findTypePropertyValue</code> where where Type
* is the type of Java object to be found. If no property value is found, null is returned.
*
* @author Gary O'Neall
*
*/
public abstract class RdfModelObject implements IRdfModel, Cloneable {
// the following hashmaps translate between pre-defined
// property values and their URI's used to uniquely identify them
// in the RDF model
static final Map<String, String> PRE_DEFINED_VALUE_URI = Maps.newHashMap();
static final Map<String, String> PRE_DEFINED_URI_VALUE = Maps.newHashMap();

// Bidirectional mapping for the SPDX NOASSERTION and NONE sentinel values.
static {
    PRE_DEFINED_VALUE_URI.put(SpdxRdfConstants.NOASSERTION_VALUE, SpdxRdfConstants.URI_VALUE_NOASSERTION);
    PRE_DEFINED_URI_VALUE.put(SpdxRdfConstants.URI_VALUE_NOASSERTION, SpdxRdfConstants.NOASSERTION_VALUE);
    PRE_DEFINED_VALUE_URI.put(SpdxRdfConstants.NONE_VALUE, SpdxRdfConstants.URI_VALUE_NONE);
    PRE_DEFINED_URI_VALUE.put(SpdxRdfConstants.URI_VALUE_NONE, SpdxRdfConstants.NONE_VALUE);
}

protected Model model;                  // Jena model backing this object (null until created/attached)
protected Resource resource;            // resource representing this object within the model
protected Node node;                    // node for the resource
protected IModelContainer modelContainer; // container owning the model

// NOTE(review): this static field appears unused within the visible code;
// confirm against the rest of the file before removing.
static RdfModelObject rdfModelObject;

/**
 * Force a refresh for the model on every property get.  This is slower, but
 * will make sure that the correct value is returned if there happens to be
 * two Java objects using the same RDF properties.
 *
 * The property should be set based on if there are more than two objects
 * for the same node in the container containing this model
 */
protected boolean refreshOnGet = true;
/**
 * Create an RDF Model Object based on an existing Node
 * @param modelContainer Container containing the RDF Model
 * @param node Node describing this object
 * @throws InvalidSPDXAnalysisException if the node is a literal (only blank
 *         and URI nodes can represent a model object)
 */
public RdfModelObject(IModelContainer modelContainer, Node node) throws InvalidSPDXAnalysisException {
    this.modelContainer = modelContainer;
    this.model = modelContainer.getModel();
    this.node = node;
    // The container decides whether this node is shared by multiple Java
    // objects; if so, property gets must re-read the model (refreshOnGet).
    this.refreshOnGet = modelContainer.addCheckNodeObject(node, this);
    if (node.isBlank()) {
        resource = model.createResource(node.getBlankNodeId());
    } else if (node.isURI()) {
        resource = model.createResource(node.getURI());
    } else {
        throw(new InvalidSPDXAnalysisException("Can not have an model node as a literal"));
    }
}
/**
 * Create an Rdf Model Object without any associated nodes.  It is assumed that
 * populate model will be called to intialize the model
 * (via {@code createResource(IModelContainer)}).
 */
public RdfModelObject() {
}
/* (non-Javadoc)
 * @see org.spdx.rdfparser.model.IRdfModel#createResource(com.hp.hpl.jena.rdf.model.Model, java.lang.String)
 */
// Convenience overload: always updates the model from this element.
@Override
public Resource createResource(IModelContainer modelContainer) throws InvalidSPDXAnalysisException {
    return createResource(modelContainer, true);
}
/**
 * Creates (or reuses) the Jena resource for this object within the given
 * container, then synchronizes properties in the direction selected by
 * {@code updateModel}.
 *
 * @param modelContainer container that will own the resource
 * @param updateModel If true, update the model from the element.  If false, update the
 * element from the model.  This is used for relationships to make sure we don't overwrite
 * the original element when setting the related element property value.
 * @return the resource representing this object in the container's model
 * @throws InvalidSPDXAnalysisException on model access/population failures
 */
public Resource createResource(IModelContainer modelContainer, boolean updateModel) throws InvalidSPDXAnalysisException {
    // Fast path: already attached to this container with a matching URI.
    if (this.modelContainer != null && this.modelContainer.equals(modelContainer) &&
            this.resource != null) {
        if (!this.resource.isURIResource() || this.resource.getURI().equals(getUri(modelContainer))) {
            return this.resource;
        }
    }
    String uri = getUri(modelContainer);
    Resource duplicate = findDuplicateResource(modelContainer, uri);
    // we need to wait to set the following to fields since they are checked
    // by some of the setters
    this.modelContainer = modelContainer;
    this.model = modelContainer.getModel();
    this.resource = modelContainer.createResource(duplicate, uri, getType(model), this);
    this.node = this.resource.asNode();
    if (duplicate == null || updateModel) {
        populateModel();
    } else {
        // A duplicate exists and we must not overwrite it: read instead.
        getPropertiesFromModel();
    }
    return resource;
}
/**
 * Fetch all of the properties from the model and populate the local Java properties
 * @throws InvalidSPDXAnalysisException on model access failures
 */
public abstract void getPropertiesFromModel() throws InvalidSPDXAnalysisException;
/**
 * Search the model to see if there is a duplicate resource either based on the
 * URI or based on other information.  Subclasses may choose to override this
 * method to prevent duplicate resource from being created with the same properties.
 * @param modelContainer container whose model is searched
 * @param uri URI to match; may be null for anonymous objects
 * @return Any duplicate resource found.  Null if no duplicate resource was found.
 * @throws InvalidSPDXAnalysisException on model access failures
 */
public Resource findDuplicateResource(IModelContainer modelContainer, String uri) throws InvalidSPDXAnalysisException {
    // Default behavior delegates to the shared helper; subclasses override
    // to add type-specific duplicate detection.
    return RdfModelHelper.findDuplicateResource(modelContainer, uri);
}
/**
 * Get the URI for this RDF object. Null if this is for an anonymous node.
 * @param modelContainer container supplying the document namespace
 * @return the URI for this object, or null for an anonymous node
 * @throws InvalidSPDXAnalysisException
 */
public abstract String getUri(IModelContainer modelContainer) throws InvalidSPDXAnalysisException;
/**
 * @param model model in which the type resource is created or looked up
 * @return the RDF type resource (rdf:type) for the object
 */
public abstract Resource getType(Model model);
/**
 * Populate the RDF model from the Java properties.
 * Invoked by createResource when no duplicate exists or updateModel is true.
 * @throws InvalidSPDXAnalysisException
 */
public abstract void populateModel() throws InvalidSPDXAnalysisException;
/**
 * Returns true if the two resources represent the same node.
 * Anonymous resources are compared by blank-node ID, named resources by URI;
 * null arguments are handled (both null compares equal).
 * @param r1 first resource, may be null
 * @param r2 second resource, may be null
 * @return true when both identify the same RDF node
 */
protected boolean resourcesEqual(Resource r1,
		Resource r2) {
	if (r1 == null || r2 == null) {
		return r1 == r2;
	}
	if (r1.isAnon()) {
		return r2.isAnon() && r1.getId().equals(r2.getId());
	}
	return r2.isURIResource() && r1.getURI().equals(r2.getURI());
}
/**
 * Two RdfModelObjects are considered equal when they are backed by the same
 * RDF node; when neither side has a resource yet, falls back to identity.
 * NOTE(review): equal objects whose resources live in different models hash
 * differently in hashCode() (which mixes in model.hashCode()) — confirm the
 * equals/hashCode pairing is intentional.
 */
@Override
public boolean equals(Object o) {
	if (!(o instanceof RdfModelObject)) {
		return false;
	}
	RdfModelObject comp = (RdfModelObject)o;
	if (comp.resource != null || this.resource != null) {
		// We consider them to be equal if they represent the same
		// resource node
		return resourcesEqual(comp.resource, this.resource);
	} else {
		return super.equals(o);
	}
}
/**
 * Hash is derived from the backing resource and its model when bound;
 * otherwise the identity hash is used.
 * NOTE(review): mixing in model.hashCode() means two objects that compare
 * equal via resourcesEqual (same URI, different models) can hash differently —
 * verify against the equals() contract.
 */
@Override
public int hashCode() {
	if (this.resource != null) {
		return this.resource.hashCode() ^ model.hashCode();
	} else {
		return super.hashCode();
	}
}
/**
 * Finds all SPDX elements that are objects of triples whose subject is this
 * object's node and whose predicate matches the supplied property.
 * @param namespace RDF namespace for the property
 * @param propertyName local name of the property
 * @return matching elements; null if this object is not yet attached to a model
 * @throws InvalidSPDXAnalysisException
 */
protected SpdxElement[] findMultipleElementPropertyValues(String namespace,
		String propertyName) throws InvalidSPDXAnalysisException {
	if (this.model == null || this.node == null) {
		return null;
	}
	Node predicate = model.getProperty(namespace, propertyName).asNode();
	Triple match = Triple.createMatch(node, predicate, null);
	List<SpdxElement> elements = Lists.newArrayList();
	for (ExtendedIterator<Triple> iter = model.getGraph().find(match); iter.hasNext();) {
		elements.add(SpdxElementFactory.createElementFromModel(modelContainer,
				iter.next().getObject()));
	}
	return elements.toArray(new SpdxElement[elements.size()]);
}
/**
 * Find a single SPDX element with a subject of this object.
 * @param namespace RDF namespace for the property
 * @param propertyName local name of the property
 * @return the first matching element, or null when none exist
 * @throws InvalidSPDXAnalysisException
 */
protected SpdxElement findElementPropertyValue(String namespace,
		String propertyName) throws InvalidSPDXAnalysisException {
	SpdxElement[] matches = findMultipleElementPropertyValues(namespace, propertyName);
	return (matches == null || matches.length == 0) ? null : matches[0];
}
/**
 * Find a property value with a subject of this object.
 * Pre-defined SPDX URI values are translated back to their string form.
 * @param namespace Namespace for the property name
 * @param propertyName Name of the property
 * @return The string value of the first matching property or null if no property exists
 */
public String findSinglePropertyValue(String namespace, String propertyName) {
	if (this.model == null || this.node == null) {
		return null;
	}
	Node predicate = model.getProperty(namespace, propertyName).asNode();
	ExtendedIterator<Triple> iter = model.getGraph().find(
			Triple.createMatch(node, predicate, null));
	if (!iter.hasNext()) {
		return null;
	}
	Node object = iter.next().getObject();
	if (object.isURI()) {
		// check for a pre-defined "special" SPDX value
		String predefined = PRE_DEFINED_URI_VALUE.get(object.getURI());
		if (predefined != null) {
			return predefined;
		}
	}
	return object.toString(false);
}
/**
 * Finds multiple property values with a subject of this object.
 * Pre-defined SPDX URI values are translated back to their string form.
 * @param namespace Namespace for the property name
 * @param propertyName Name of the property
 * @return The string values of the property or null if this object is not attached to a model
 */
public String[] findMultiplePropertyValues(String namespace, String propertyName) {
	if (this.model == null || this.node == null) {
		return null;
	}
	Node predicate = model.getProperty(namespace, propertyName).asNode();
	Triple match = Triple.createMatch(node, predicate, null);
	List<String> values = Lists.newArrayList();
	for (ExtendedIterator<Triple> iter = model.getGraph().find(match); iter.hasNext();) {
		Node object = iter.next().getObject();
		if (object.isURI() && PRE_DEFINED_URI_VALUE.containsKey(object.getURI())) {
			values.add(PRE_DEFINED_URI_VALUE.get(object.getURI()));
		} else {
			values.add(object.toString(false));
		}
	}
	return values.toArray(new String[values.size()]);
}
/**
 * Set property values for this resource.  Clears any existing values first.
 * If a string matches one of the SPDX pre-defined string values, the URI
 * for that value is stored.  Otherwise, it is stored as a literal value.
 * Null array entries are skipped; a no-op when not yet attached to a model.
 * @param nameSpace RDF Namespace for the property
 * @param propertyName RDF Property Name
 * @param values Values to associate to this resource
 */
protected void setPropertyValue(String nameSpace, String propertyName,
		String[] values) {
	if (model == null || resource == null) {
		return;
	}
	Property p = model.createProperty(nameSpace, propertyName);
	model.removeAll(this.resource, p, null);
	if (values == null) {
		return;
	}
	for (String value : values) {
		if (value == null) {
			continue;
		}
		String valueUri = PRE_DEFINED_VALUE_URI.get(value);
		if (valueUri != null) {
			// this is a pre-defined "special" SPDX value
			this.resource.addProperty(p, this.model.createResource(valueUri));
		} else {
			this.resource.addProperty(p, value);
		}
	}
}
/**
 * Sets the SPDX element property values for this resource, clearing any
 * existing values.  Null array entries are skipped.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param elements elements to associate with this resource
 * @param updateModel If true, update the model from the element. If false, update the
 * element from the model. This is used for relationships to make sure we don't overwrite
 * the original element when setting the related element property value.
 * @throws InvalidSPDXAnalysisException
 */
protected void setPropertyValue(String nameSpace, String propertyName,
		SpdxElement[] elements, boolean updateModel) throws InvalidSPDXAnalysisException {
	if (model == null || resource == null) {
		return;
	}
	Property p = model.createProperty(nameSpace, propertyName);
	model.removeAll(this.resource, p, null);
	if (elements == null) {
		return;
	}
	for (SpdxElement element : elements) {
		if (element != null) {
			this.resource.addProperty(p, element.createResource(modelContainer, updateModel));
		}
	}
}
/**
 * Sets the SPDX element property value for this resource, clearing any
 * existing values.  A null element just clears the property.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param element element to associate with this resource
 * @param updateModel If true, update the model from the element. If false, update the
 * element from the model. This is used for relationships to make sure we don't overwrite
 * the original element when setting the related element property value.
 * @throws InvalidSPDXAnalysisException
 */
protected void setPropertyValue(String nameSpace, String propertyName,
		SpdxElement element, boolean updateModel) throws InvalidSPDXAnalysisException {
	if (model == null || resource == null) {
		return;
	}
	Property p = model.createProperty(nameSpace, propertyName);
	model.removeAll(this.resource, p, null);
	if (element != null) {
		this.resource.addProperty(p, element.createResource(modelContainer, updateModel));
	}
}
/**
 * Adds an SPDX element property value for this resource without removing the
 * old property values.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param element element to add; ignored when null
 * @param updateModel If true, update the model from the element. If false, update the
 * element from the model. This is used for relationships to make sure we don't overwrite
 * the original element when setting the related element property value.
 * @throws InvalidSPDXAnalysisException
 */
protected void addPropertyValue(String nameSpace, String propertyName,
		SpdxElement element, boolean updateModel) throws InvalidSPDXAnalysisException {
	if (model == null || resource == null || element == null) {
		return;
	}
	Property p = model.createProperty(nameSpace, propertyName);
	this.resource.addProperty(p, element.createResource(modelContainer, updateModel));
}
/**
 * Adds an SPDX element property value for this resource without removing the old property values.
 * Convenience overload that updates the model from the element (updateModel == true).
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param element element to add
 * @throws InvalidSPDXAnalysisException
 */
protected void addPropertyValue(String nameSpace, String propertyName,
		SpdxElement element) throws InvalidSPDXAnalysisException {
	addPropertyValue(nameSpace, propertyName, element, true);
}
/**
 * Sets the SPDX element property value, clearing existing values.
 * Convenience overload that updates the model from the element (updateModel == true).
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param element element to associate with this resource
 * @throws InvalidSPDXAnalysisException
 */
protected void setPropertyValue(String nameSpace, String propertyName,
		SpdxElement element) throws InvalidSPDXAnalysisException {
	setPropertyValue(nameSpace, propertyName, element, true);
}
/**
 * Sets the SPDX element property values, clearing existing values.
 * Convenience overload that updates the model from the elements (updateModel == true).
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param element elements to associate with this resource
 * @throws InvalidSPDXAnalysisException
 */
protected void setPropertyValue(String nameSpace, String propertyName,
		SpdxElement[] element) throws InvalidSPDXAnalysisException {
	setPropertyValue(nameSpace, propertyName, element, true);
}
/**
 * Sets the annotation property values for this resource, clearing any
 * existing values.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param annotations annotations to associate with this resource
 * @throws InvalidSPDXAnalysisException
 */
protected void setPropertyValues(String nameSpace, String propertyName,
		Annotation[] annotations) throws InvalidSPDXAnalysisException {
	if (model == null || resource == null) {
		return;
	}
	Property p = model.createProperty(nameSpace, propertyName);
	model.removeAll(this.resource, p, null);
	if (annotations == null) {
		return;
	}
	for (Annotation annotation : annotations) {
		this.resource.addProperty(p, annotation.createResource(modelContainer));
	}
}
/**
 * Adds an annotation property value for this resource without removing the
 * old property values.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param annotation annotation to add; ignored when null
 * @throws InvalidSPDXAnalysisException
 */
protected void addPropertyValue(String nameSpace, String propertyName,
		Annotation annotation) throws InvalidSPDXAnalysisException {
	if (model == null || resource == null || annotation == null) {
		return;
	}
	Property p = model.createProperty(nameSpace, propertyName);
	this.resource.addProperty(p, annotation.createResource(modelContainer));
}
/**
 * Find all annotations with a subject of this object.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @return matching annotations; null if this object is not yet attached to a model
 * @throws InvalidSPDXAnalysisException
 */
protected Annotation[] findAnnotationPropertyValues(String nameSpace,
		String propertyName) throws InvalidSPDXAnalysisException {
	if (this.model == null || this.node == null) {
		return null;
	}
	Node predicate = model.getProperty(nameSpace, propertyName).asNode();
	Triple match = Triple.createMatch(node, predicate, null);
	List<Annotation> annotations = Lists.newArrayList();
	for (ExtendedIterator<Triple> iter = model.getGraph().find(match); iter.hasNext();) {
		annotations.add(new Annotation(this.modelContainer, iter.next().getObject()));
	}
	return annotations.toArray(new Annotation[annotations.size()]);
}
/**
 * Find all relationships with a subject of this object.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @return matching relationships; null if this object is not yet attached to a model
 * @throws InvalidSPDXAnalysisException
 */
protected Relationship[] findRelationshipPropertyValues(String nameSpace,
		String propertyName) throws InvalidSPDXAnalysisException {
	if (this.model == null || this.node == null) {
		return null;
	}
	Node predicate = model.getProperty(nameSpace, propertyName).asNode();
	Triple match = Triple.createMatch(node, predicate, null);
	List<Relationship> relationships = Lists.newArrayList();
	for (ExtendedIterator<Triple> iter = model.getGraph().find(match); iter.hasNext();) {
		relationships.add(new Relationship(this.modelContainer, iter.next().getObject()));
	}
	return relationships.toArray(new Relationship[relationships.size()]);
}
/**
 * Set a single string property value for this resource.  Clears any existing values.
 * Delegates to the array overload, which handles pre-defined SPDX values.
 * @param nameSpace RDF Namespace for the property
 * @param propertyName RDF Property Name
 * @param value Value to set
 */
protected void setPropertyValue(String nameSpace, String propertyName,
		String value) {
	setPropertyValue(nameSpace, propertyName, new String[] {value});
}
/**
 * Removes all property values for this resource.
 * No-op when this object is not yet attached to a model.
 * @param nameSpace RDF Namespace for the property
 * @param propertyName RDF Property Name
 */
protected void removePropertyValue(String nameSpace, String propertyName) {
	if (model != null && resource != null) {
		Property p = model.createProperty(nameSpace, propertyName);
		model.removeAll(this.resource, p, null);
	}
}
/**
 * Sets the relationship property values for this resource, clearing any
 * existing values.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param relationships relationships to associate with this resource
 * @throws InvalidSPDXAnalysisException
 */
protected void setPropertyValues(String nameSpace,
		String propertyName, Relationship[] relationships) throws InvalidSPDXAnalysisException {
	if (model == null || resource == null) {
		return;
	}
	Property p = model.createProperty(nameSpace, propertyName);
	model.removeAll(this.resource, p, null);
	if (relationships == null) {
		return;
	}
	for (Relationship relationship : relationships) {
		this.resource.addProperty(p, relationship.createResource(modelContainer));
	}
}
/**
 * Add a relationship property value without removing existing values.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param relationship relationship to add; ignored when null
 * @throws InvalidSPDXAnalysisException
 */
protected void addPropertyValue(String nameSpace,
		String propertyName, Relationship relationship) throws InvalidSPDXAnalysisException {
	if (model == null || resource == null || relationship == null) {
		return;
	}
	Property p = model.createProperty(nameSpace, propertyName);
	this.resource.addProperty(p, relationship.createResource(modelContainer));
}
/**
 * Sets the license property values for this resource, clearing any existing
 * values.  Null array entries are skipped.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param licenses licenses to associate with this resource
 * @throws InvalidSPDXAnalysisException
 */
public void setPropertyValues(String nameSpace,
		String propertyName, AnyLicenseInfo[] licenses) throws InvalidSPDXAnalysisException {
	if (model == null || resource == null) {
		return;
	}
	Property p = model.createProperty(nameSpace, propertyName);
	model.removeAll(this.resource, p, null);
	if (licenses == null) {
		return;
	}
	for (AnyLicenseInfo license : licenses) {
		if (license != null) {
			this.resource.addProperty(p, license.createResource(this.modelContainer));
		}
	}
}
/**
 * Add a property value of type AnyLicenseInfo without removing the existing
 * property values.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param license license to add; ignored when null
 * @throws InvalidSPDXAnalysisException
 */
public void addPropertyValue(String nameSpace,
		String propertyName, AnyLicenseInfo license) throws InvalidSPDXAnalysisException {
	if (model == null || resource == null || license == null) {
		return;
	}
	Property p = model.createProperty(nameSpace, propertyName);
	this.resource.addProperty(p, license.createResource(this.modelContainer));
}
/**
 * Set a single license property value for this resource.  Clears any existing values.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param license license to associate with this resource
 * @throws InvalidSPDXAnalysisException
 */
protected void setPropertyValue(String nameSpace,
		String propertyName, AnyLicenseInfo license) throws InvalidSPDXAnalysisException {
	setPropertyValues(nameSpace, propertyName, new AnyLicenseInfo[] {license});
}
/**
 * Find all license property values with a subject of this object.
 * @param namespace Namespace for the property name
 * @param propertyName Name of the property
 * @return matching licenses; empty array when not attached to a model or no matches
 * @throws InvalidSPDXAnalysisException
 */
public AnyLicenseInfo[] findAnyLicenseInfoPropertyValues(String namespace, String propertyName) throws InvalidSPDXAnalysisException {
	if (this.model == null || this.node == null) {
		return new AnyLicenseInfo[0];
	}
	Node predicate = model.getProperty(namespace, propertyName).asNode();
	Triple match = Triple.createMatch(node, predicate, null);
	List<AnyLicenseInfo> licenses = Lists.newArrayList();
	for (ExtendedIterator<Triple> iter = model.getGraph().find(match); iter.hasNext();) {
		licenses.add(LicenseInfoFactory.getLicenseInfoFromModel(modelContainer, iter.next().getObject()));
	}
	return licenses.toArray(new AnyLicenseInfo[licenses.size()]);
}
/**
 * Find a single license property value with a subject of this object.
 * @param namespace Namespace for the property name
 * @param propertyName Name of the property
 * @return the first matching license, or null when none exist
 * @throws InvalidSPDXAnalysisException
 */
public AnyLicenseInfo findAnyLicenseInfoPropertyValue(String namespace, String propertyName) throws InvalidSPDXAnalysisException {
	AnyLicenseInfo[] licenses = findAnyLicenseInfoPropertyValues(namespace, propertyName);
	return (licenses == null || licenses.length == 0) ? null : licenses[0];
}
/**
 * Find all checksum property values with a subject of this object.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @return matching checksums; empty array when not attached to a model or no matches
 * @throws InvalidSPDXAnalysisException
 */
protected Checksum[] findMultipleChecksumPropertyValues(String nameSpace,
		String propertyName) throws InvalidSPDXAnalysisException {
	if (this.model == null || this.node == null) {
		return new Checksum[0];
	}
	Node predicate = model.getProperty(nameSpace, propertyName).asNode();
	Triple match = Triple.createMatch(node, predicate, null);
	List<Checksum> checksums = Lists.newArrayList();
	for (ExtendedIterator<Triple> iter = model.getGraph().find(match); iter.hasNext();) {
		checksums.add(new Checksum(modelContainer, iter.next().getObject()));
	}
	return checksums.toArray(new Checksum[checksums.size()]);
}
/**
 * Find a single checksum property value with a subject of this object.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @return the first matching checksum, or null when none exist
 * @throws InvalidSPDXAnalysisException
 */
protected Checksum findChecksumPropertyValue(String nameSpace,
		String propertyName) throws InvalidSPDXAnalysisException {
	Checksum[] checksums = findMultipleChecksumPropertyValues(nameSpace, propertyName);
	return (checksums == null || checksums.length == 0) ? null : checksums[0];
}
/**
 * Add a checksum as a property to this resource without removing existing values.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param checksumValue checksum to add; ignored when null
 * @throws InvalidSPDXAnalysisException
 */
protected void addPropertyValue(String nameSpace,
		String propertyName, Checksum checksumValue) throws InvalidSPDXAnalysisException {
	if (model == null || resource == null || checksumValue == null) {
		return;
	}
	Property p = model.createProperty(nameSpace, propertyName);
	this.resource.addProperty(p, checksumValue.createResource(this.modelContainer));
}
/**
 * Sets the checksum property values for this resource, clearing any existing
 * values.  Null array entries are skipped.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param checksumValues checksums to associate with this resource
 * @throws InvalidSPDXAnalysisException
 */
protected void setPropertyValues(String nameSpace,
		String propertyName, Checksum[] checksumValues) throws InvalidSPDXAnalysisException {
	if (model == null || resource == null) {
		return;
	}
	Property p = model.createProperty(nameSpace, propertyName);
	model.removeAll(this.resource, p, null);
	if (checksumValues == null) {
		return;
	}
	for (Checksum checksum : checksumValues) {
		if (checksum != null) {
			this.resource.addProperty(p, checksum.createResource(this.modelContainer));
		}
	}
}
/**
 * Sets a single checksum property value, clearing existing values.
 * A null checksum simply clears the property.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param checksumValue checksum to associate with this resource, may be null
 * @throws InvalidSPDXAnalysisException
 */
protected void setPropertyValue(String nameSpace,
		String propertyName, Checksum checksumValue) throws InvalidSPDXAnalysisException {
	Checksum[] checksums = (checksumValue == null)
			? new Checksum[0] : new Checksum[] {checksumValue};
	setPropertyValues(nameSpace, propertyName, checksums);
}
/**
 * Find all DOAP project property values with a subject of this object.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @return matching DOAP projects; empty array when not attached to a model or no matches
 * @throws InvalidSPDXAnalysisException
 */
protected DoapProject[] findMultipleDoapPropertyValues(String nameSpace,
		String propertyName) throws InvalidSPDXAnalysisException {
	if (this.model == null || this.node == null) {
		return new DoapProject[0];
	}
	Node predicate = model.getProperty(nameSpace, propertyName).asNode();
	Triple match = Triple.createMatch(node, predicate, null);
	List<DoapProject> projects = Lists.newArrayList();
	for (ExtendedIterator<Triple> iter = model.getGraph().find(match); iter.hasNext();) {
		projects.add(new DoapProject(modelContainer, iter.next().getObject()));
	}
	return projects.toArray(new DoapProject[projects.size()]);
}
/**
 * Sets the DOAP project property values for this resource, clearing any
 * existing values.
 * NOTE: named setPropertyValue (singular) despite taking an array, unlike the
 * other array setters; the name is kept for caller compatibility.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param doapProjectValues DOAP projects to associate with this resource
 * @throws InvalidSPDXAnalysisException
 */
protected void setPropertyValue(String nameSpace,
		String propertyName, DoapProject[] doapProjectValues) throws InvalidSPDXAnalysisException {
	if (model == null || resource == null) {
		return;
	}
	Property p = model.createProperty(nameSpace, propertyName);
	model.removeAll(this.resource, p, null);
	if (doapProjectValues == null) {
		return;
	}
	for (DoapProject project : doapProjectValues) {
		this.resource.addProperty(p, project.createResource(this.modelContainer));
	}
}
/**
 * Find a single URI property value for this node.
 * @param namespace RDF namespace for the property
 * @param propertyName RDF property name
 * @return the first matching URI, or null when none exist
 */
protected String findUriPropertyValue(String namespace,
		String propertyName) {
	String[] uris = findUriPropertyValues(namespace, propertyName);
	return (uris == null || uris.length == 0) ? null : uris[0];
}
/**
 * Find all URI property values for this node.  Non-URI objects are skipped;
 * prefixed URIs are expanded via the model's prefix mapping.
 * @param namespace RDF namespace for the property
 * @param propertyName RDF property name
 * @return matching URIs; empty array when not attached to a model or no matches
 */
protected String[] findUriPropertyValues(String namespace,
		String propertyName) {
	if (this.model == null || this.node == null) {
		return new String[0];
	}
	Node predicate = model.getProperty(namespace, propertyName).asNode();
	Triple match = Triple.createMatch(node, predicate, null);
	List<String> uris = Lists.newArrayList();
	for (ExtendedIterator<Triple> iter = model.getGraph().find(match); iter.hasNext();) {
		Node object = iter.next().getObject();
		if (object.isURI()) {
			uris.add(model.expandPrefix(object.getURI()));
		}
	}
	return uris.toArray(new String[uris.size()]);
}
/**
 * Sets a property value as a list of URIs, clearing any existing values.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param uris URIs to associate with this resource
 * @throws InvalidSPDXAnalysisException
 */
protected void setPropertyUriValues(String nameSpace,
		String propertyName, String[] uris) throws InvalidSPDXAnalysisException {
	if (model == null || resource == null) {
		return;
	}
	Property p = model.createProperty(nameSpace, propertyName);
	model.removeAll(this.resource, p, null);
	if (uris == null) {
		return;
	}
	for (String uri : uris) {
		this.resource.addProperty(p, model.createResource(uri));
	}
}
/**
 * Adds a property value as a URI without removing existing values.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param uri URI to add; ignored when null
 * @throws InvalidSPDXAnalysisException
 */
protected void addPropertyUriValue(String nameSpace,
		String propertyName, String uri) throws InvalidSPDXAnalysisException {
	if (model == null || resource == null || uri == null) {
		return;
	}
	Property p = model.createProperty(nameSpace, propertyName);
	this.resource.addProperty(p, model.createResource(uri));
}
/**
 * Sets a single property value as a URI, clearing any existing values.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param uri URI to associate with this resource
 * @throws InvalidSPDXAnalysisException
 */
protected void setPropertyUriValue(String nameSpace,
		String propertyName, String uri) throws InvalidSPDXAnalysisException {
	setPropertyUriValues(nameSpace, propertyName, new String[] {uri});
}
/**
 * Find the creation information property value for this node.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @return the first matching creator information, or null when none exist
 * @throws InvalidSPDXAnalysisException
 */
protected SPDXCreatorInformation findCreationInfoPropertyValue(
		String nameSpace, String propertyName) throws InvalidSPDXAnalysisException {
	if (this.model == null || this.node == null) {
		return null;
	}
	Node predicate = model.getProperty(nameSpace, propertyName).asNode();
	ExtendedIterator<Triple> iter = model.getGraph().find(
			Triple.createMatch(node, predicate, null));
	if (iter.hasNext()) {
		return new SPDXCreatorInformation(model, iter.next().getObject());
	}
	return null;
}
/**
 * Sets the creation information property value, clearing any existing values.
 * A null creatorInfo just clears the property.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param creatorInfo creator information to associate with this resource
 */
protected void setPropertyValue(String nameSpace,
		String propertyName, SPDXCreatorInformation creatorInfo) {
	if (model == null || resource == null) {
		return;
	}
	Property p = model.createProperty(nameSpace, propertyName);
	model.removeAll(this.resource, p, null);
	if (creatorInfo != null) {
		this.resource.addProperty(p, creatorInfo.createResource(model));
	}
}
/**
 * Find all external document references with a subject of this object,
 * delegating to the static overload with this object's container and node.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @return matching external document references (empty array when unattached)
 * @throws InvalidSPDXAnalysisException
 */
public ExternalDocumentRef[] findExternalDocRefPropertyValues(
		String nameSpace, String propertyName) throws InvalidSPDXAnalysisException {
	return findExternalDocRefPropertyValues(nameSpace, propertyName,
			this.modelContainer, this.node);
}
/**
 * Find all external document references attached to the given node.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param extDocModelContainer container holding the model to search
 * @param nodeContainingExternalRefs subject node whose references are fetched
 * @return matching external document references; empty array when inputs are null or no matches
 * @throws InvalidSPDXAnalysisException
 */
public static ExternalDocumentRef[] findExternalDocRefPropertyValues(
		String nameSpace, String propertyName, IModelContainer extDocModelContainer,
		Node nodeContainingExternalRefs) throws InvalidSPDXAnalysisException {
	if (extDocModelContainer == null || nodeContainingExternalRefs == null) {
		return new ExternalDocumentRef[0];
	}
	Model model = extDocModelContainer.getModel();
	Node predicate = model.getProperty(nameSpace, propertyName).asNode();
	Triple match = Triple.createMatch(nodeContainingExternalRefs, predicate, null);
	List<ExternalDocumentRef> refs = Lists.newArrayList();
	for (ExtendedIterator<Triple> iter = model.getGraph().find(match); iter.hasNext();) {
		refs.add(new ExternalDocumentRef(extDocModelContainer, iter.next().getObject()));
	}
	return refs.toArray(new ExternalDocumentRef[refs.size()]);
}
/**
 * Sets the external document reference property values for this resource,
 * clearing any existing values.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param externalDocRefs external document references to associate with this resource
 * @throws InvalidSPDXAnalysisException
 */
public void setPropertyValues(String nameSpace, String propertyName,
		ExternalDocumentRef[] externalDocRefs) throws InvalidSPDXAnalysisException {
	if (model == null || resource == null) {
		return;
	}
	Property p = model.createProperty(nameSpace, propertyName);
	model.removeAll(this.resource, p, null);
	if (externalDocRefs == null) {
		return;
	}
	for (ExternalDocumentRef ref : externalDocRefs) {
		this.resource.addProperty(p, ref.createResource(modelContainer));
	}
}
/**
 * Sets the review property values for this resource, clearing any existing
 * values.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param reviewers reviews to associate with this resource
 */
protected void setPropertyValues(String nameSpace,
		String propertyName, SPDXReview[] reviewers) {
	if (model == null || resource == null) {
		return;
	}
	Property p = model.createProperty(nameSpace, propertyName);
	model.removeAll(this.resource, p, null);
	if (reviewers == null) {
		return;
	}
	for (SPDXReview reviewer : reviewers) {
		this.resource.addProperty(p, reviewer.createResource(model));
	}
}
/**
 * Find all reviews with a subject of this object.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @return matching reviews; empty array when not attached to a model or no matches
 * @throws InvalidSPDXAnalysisException
 */
protected SPDXReview[] findReviewPropertyValues(String nameSpace,
		String propertyName) throws InvalidSPDXAnalysisException {
	if (this.model == null || this.node == null) {
		return new SPDXReview[0];
	}
	Node predicate = model.getProperty(nameSpace, propertyName).asNode();
	Triple match = Triple.createMatch(node, predicate, null);
	List<SPDXReview> reviews = Lists.newArrayList();
	for (ExtendedIterator<Triple> iter = model.getGraph().find(match); iter.hasNext();) {
		reviews.add(new SPDXReview(model, iter.next().getObject()));
	}
	return reviews.toArray(new SPDXReview[reviews.size()]);
}
/**
 * Find the package verification code property value for this node.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @return the first matching verification code, or null when none exist
 * @throws InvalidSPDXAnalysisException
 */
protected SpdxPackageVerificationCode findVerificationCodePropertyValue(
		String nameSpace,String propertyName) throws InvalidSPDXAnalysisException {
	if (this.model == null || this.node == null) {
		return null;
	}
	Node predicate = model.getProperty(nameSpace, propertyName).asNode();
	ExtendedIterator<Triple> iter = model.getGraph().find(
			Triple.createMatch(node, predicate, null));
	if (iter.hasNext()) {
		return new SpdxPackageVerificationCode(model, iter.next().getObject());
	}
	return null;
}
/**
 * Sets the package verification code property value, clearing any existing
 * values.  A null verificationCode just clears the property.
 * @param nameSpace RDF namespace for the property
 * @param propertyName RDF property name
 * @param verificationCode verification code to associate with this resource
 * @throws InvalidSPDXAnalysisException
 */
protected void setPropertyValue(String nameSpace,
		String propertyName, SpdxPackageVerificationCode verificationCode) throws InvalidSPDXAnalysisException {
	if (model == null || resource == null) {
		return;
	}
	Property p = model.createProperty(nameSpace, propertyName);
	model.removeAll(this.resource, p, null);
	if (verificationCode != null) {
		this.resource.addProperty(p, verificationCode.createResource(model));
	}
}
/**
 * Compares 2 arrays to see if the property values for the element RdfModelObjects
 * are the same independent of order and considering nulls.
 * Delegates to RdfModelHelper.
 * @param array1 first array, may be null
 * @param array2 second array, may be null
 * @return true when the arrays contain equivalent elements
 */
public boolean arraysEquivalent(IRdfModel[] array1, IRdfModel[] array2) {
	return RdfModelHelper.arraysEquivalent(array1, array2);
}
/**
 * Compares the properties of two RdfModelObjects considering possible null values.
 * @param o1 first object, may be null
 * @param o2 second object, may be null
 * @return true when the objects are identical or equivalent per RdfModelHelper
 */
public boolean equivalentConsideringNull(IRdfModel o1, IRdfModel o2) {
	// Identity (including both null) short-circuits the helper comparison.
	return o1 == o2 || RdfModelHelper.equivalentConsideringNull(o1, o2);
}
/* (non-Javadoc)
 * @see org.spdx.rdfparser.model.IRdfModel#setMultipleObjectsForSameNode()
 */
// Enables refresh-on-get: property getters re-read from the RDF model because
// multiple Java objects may share this node.
@Override
public void setMultipleObjectsForSameNode() {
	this.refreshOnGet = true;
}
/* (non-Javadoc)
 * @see org.spdx.rdfparser.model.IRdfModel#setSingleObjectForSameNode()
 */
// Disables refresh-on-get: this object is the sole Java representation of the node.
@Override
public void setSingleObjectForSameNode() {
	this.refreshOnGet = false;
}
/**
 * @return true if every get of a property will be refreshed from the RDF Model - primarily used for unit testing
 */
public boolean isRefreshOnGet() {
	return this.refreshOnGet;
}
/**
 * @return the RDF Node backing this object (null if not yet bound to a model)
 */
public Node getNode() {
	return this.node;
}
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.search;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;
import hudson.model.FreeStyleProject;
import hudson.model.ListView;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import hudson.model.User;
import hudson.model.View;
import hudson.security.ACL;
import hudson.security.ACLContext;
import hudson.security.AuthorizationStrategy;
import hudson.security.GlobalMatrixAuthorizationStrategy;
import jenkins.model.Jenkins;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import net.sf.json.JSONSerializer;
import org.junit.Rule;
import org.junit.Test;
import org.jvnet.hudson.test.Issue;
import org.jvnet.hudson.test.JenkinsRule;
import org.jvnet.hudson.test.JenkinsRule.WebClient;
import org.jvnet.hudson.test.MockAuthorizationStrategy;
import org.jvnet.hudson.test.MockFolder;
import com.gargoylesoftware.htmlunit.AlertHandler;
import com.gargoylesoftware.htmlunit.FailingHttpStatusCodeException;
import com.gargoylesoftware.htmlunit.Page;
/**
* @author Kohsuke Kawaguchi
*/
public class SearchTest {
// Boots a fresh Jenkins instance per test; also provides the web client / search helpers.
@Rule public JenkinsRule j = new JenkinsRule();
/**
 * No exact match should result in a failure status code (HTTP 404).
 */
@Test
public void testFailure() throws Exception {
	try {
		j.search("no-such-thing");
		fail("404 expected");
	} catch (FailingHttpStatusCodeException e) {
		assertEquals(404,e.getResponse().getStatusCode());
	}
}
/**
 * Makes sure a script injected through the search box doesn't execute:
 * any JavaScript alert raised while loading the result page fails the test,
 * and the unmatched query must still 404.
 */
@Issue("JENKINS-3415")
@Test
public void testXSS() throws Exception {
	try {
		WebClient wc = j.createWebClient();
		wc.setAlertHandler(new AlertHandler() {
			public void handleAlert(Page page, String message) {
				// An alert firing means the injected script ran — XSS regression.
				throw new AssertionError();
			}
		});
		wc.search("<script>alert('script');</script>");
		fail("404 expected");
	} catch (FailingHttpStatusCodeException e) {
		assertEquals(404,e.getResponse().getStatusCode());
	}
}
@Test
public void testSearchByProjectName() throws Exception {
	final String projectName = "testSearchByProjectName";
	j.createFreeStyleProject(projectName);
	Page result = j.search(projectName);
	assertNotNull(result);
	j.assertGoodStatus(result);
	// make sure we've fetched the testSearchByProjectName project page
	String contents = result.getWebResponse().getContentAsString();
	assertTrue(contents.contains(String.format("<title>%s [Jenkins]</title>", projectName)));
}
@Issue("JENKINS-24433")
@Test
public void testSearchByProjectNameBehindAFolder() throws Exception {
FreeStyleProject myFreeStyleProject = j.createFreeStyleProject("testSearchByProjectName");
MockFolder myMockFolder = j.createFolder("my-folder-1");
Page result = j.createWebClient().goTo(myMockFolder.getUrl() + "search?q="+ myFreeStyleProject.getName());
assertNotNull(result);
j.assertGoodStatus(result);
URL resultUrl = result.getUrl();
assertTrue(resultUrl.toString().equals(j.getInstance().getRootUrl() + myFreeStyleProject.getUrl()));
}
@Issue("JENKINS-24433")
@Test
public void testSearchByProjectNameInAFolder() throws Exception {
MockFolder myMockFolder = j.createFolder("my-folder-1");
FreeStyleProject myFreeStyleProject = myMockFolder.createProject(FreeStyleProject.class, "my-job-1");
Page result = j.createWebClient().goTo(myMockFolder.getUrl() + "search?q=" + myFreeStyleProject.getFullName());
assertNotNull(result);
j.assertGoodStatus(result);
URL resultUrl = result.getUrl();
assertTrue(resultUrl.toString().equals(j.getInstance().getRootUrl() + myFreeStyleProject.getUrl()));
}
@Test
public void testSearchByDisplayName() throws Exception {
final String displayName = "displayName9999999";
FreeStyleProject project = j.createFreeStyleProject("testSearchByDisplayName");
project.setDisplayName(displayName);
Page result = j.search(displayName);
assertNotNull(result);
j.assertGoodStatus(result);
// make sure we've fetched the testSearchByDisplayName project page
String contents = result.getWebResponse().getContentAsString();
assertTrue(contents.contains(String.format("<title>%s [Jenkins]</title>", displayName)));
}
@Test
public void testSearch2ProjectsWithSameDisplayName() throws Exception {
// create 2 freestyle projects with the same display name
final String projectName1 = "projectName1";
final String projectName2 = "projectName2";
final String projectName3 = "projectName3";
final String displayName = "displayNameFoo";
final String otherDisplayName = "otherDisplayName";
FreeStyleProject project1 = j.createFreeStyleProject(projectName1);
project1.setDisplayName(displayName);
FreeStyleProject project2 = j.createFreeStyleProject(projectName2);
project2.setDisplayName(displayName);
FreeStyleProject project3 = j.createFreeStyleProject(projectName3);
project3.setDisplayName(otherDisplayName);
// make sure that on search we get back one of the projects, it doesn't
// matter which one as long as the one that is returned has displayName
// as the display name
Page result = j.search(displayName);
assertNotNull(result);
j.assertGoodStatus(result);
// make sure we've fetched the testSearchByDisplayName project page
String contents = result.getWebResponse().getContentAsString();
assertTrue(contents.contains(String.format("<title>%s [Jenkins]</title>", displayName)));
assertFalse(contents.contains(otherDisplayName));
}
@Test
public void testProjectNamePrecedesDisplayName() throws Exception {
final String project1Name = "foo";
final String project1DisplayName = "project1DisplayName";
final String project2Name = "project2Name";
final String project2DisplayName = project1Name;
final String project3Name = "project3Name";
final String project3DisplayName = "project3DisplayName";
// create 1 freestyle project with the name foo
FreeStyleProject project1 = j.createFreeStyleProject(project1Name);
project1.setDisplayName(project1DisplayName);
// create another with the display name foo
FreeStyleProject project2 = j.createFreeStyleProject(project2Name);
project2.setDisplayName(project2DisplayName);
// create a third project and make sure it's not picked up by search
FreeStyleProject project3 = j.createFreeStyleProject(project3Name);
project3.setDisplayName(project3DisplayName);
// search for foo
Page result = j.search(project1Name);
assertNotNull(result);
j.assertGoodStatus(result);
// make sure we get the project with the name foo
String contents = result.getWebResponse().getContentAsString();
assertTrue(contents.contains(String.format("<title>%s [Jenkins]</title>", project1DisplayName)));
// make sure projects 2 and 3 were not picked up
assertFalse(contents.contains(project2Name));
assertFalse(contents.contains(project3Name));
assertFalse(contents.contains(project3DisplayName));
}
@Test
public void testGetSuggestionsHasBothNamesAndDisplayNames() throws Exception {
final String projectName = "project name";
final String displayName = "display name";
FreeStyleProject project1 = j.createFreeStyleProject(projectName);
project1.setDisplayName(displayName);
WebClient wc = j.createWebClient();
Page result = wc.goTo("search/suggest?query=name", "application/json");
assertNotNull(result);
j.assertGoodStatus(result);
String content = result.getWebResponse().getContentAsString();
System.out.println(content);
JSONObject jsonContent = (JSONObject)JSONSerializer.toJSON(content);
assertNotNull(jsonContent);
JSONArray jsonArray = jsonContent.getJSONArray("suggestions");
assertNotNull(jsonArray);
assertEquals(2, jsonArray.size());
boolean foundProjectName = false;
boolean foundDisplayName = false;
for(Object suggestion : jsonArray) {
JSONObject jsonSuggestion = (JSONObject)suggestion;
String name = (String)jsonSuggestion.get("name");
if(projectName.equals(name)) {
foundProjectName = true;
}
else if(displayName.equals(name)) {
foundDisplayName = true;
}
}
assertTrue(foundProjectName);
assertTrue(foundDisplayName);
}
@Issue("JENKINS-24433")
@Test
public void testProjectNameBehindAFolderDisplayName() throws Exception {
final String projectName1 = "job-1";
final String displayName1 = "job-1 display";
final String projectName2 = "job-2";
final String displayName2 = "job-2 display";
FreeStyleProject project1 = j.createFreeStyleProject(projectName1);
project1.setDisplayName(displayName1);
MockFolder myMockFolder = j.createFolder("my-folder-1");
FreeStyleProject project2 = myMockFolder.createProject(FreeStyleProject.class, projectName2);
project2.setDisplayName(displayName2);
WebClient wc = j.createWebClient();
Page result = wc.goTo(myMockFolder.getUrl() + "search/suggest?query=" + projectName1, "application/json");
assertNotNull(result);
j.assertGoodStatus(result);
String content = result.getWebResponse().getContentAsString();
JSONObject jsonContent = (JSONObject)JSONSerializer.toJSON(content);
assertNotNull(jsonContent);
JSONArray jsonArray = jsonContent.getJSONArray("suggestions");
assertNotNull(jsonArray);
assertEquals(2, jsonArray.size());
boolean foundDisplayName = false;
for(Object suggestion : jsonArray) {
JSONObject jsonSuggestion = (JSONObject)suggestion;
String name = (String)jsonSuggestion.get("name");
if(projectName1.equals(name)) {
foundDisplayName = true;
}
}
assertTrue(foundDisplayName);
}
@Issue("JENKINS-24433")
@Test
public void testProjectNameInAFolderDisplayName() throws Exception {
final String projectName1 = "job-1";
final String displayName1 = "job-1 display";
final String projectName2 = "job-2";
final String displayName2 = "my-folder-1 job-2";
FreeStyleProject project1 = j.createFreeStyleProject(projectName1);
project1.setDisplayName(displayName1);
MockFolder myMockFolder = j.createFolder("my-folder-1");
FreeStyleProject project2 = myMockFolder.createProject(FreeStyleProject.class, projectName2);
project2.setDisplayName(displayName2);
WebClient wc = j.createWebClient();
Page result = wc.goTo(myMockFolder.getUrl() + "search/suggest?query=" + projectName2, "application/json");
assertNotNull(result);
j.assertGoodStatus(result);
String content = result.getWebResponse().getContentAsString();
JSONObject jsonContent = (JSONObject)JSONSerializer.toJSON(content);
assertNotNull(jsonContent);
JSONArray jsonArray = jsonContent.getJSONArray("suggestions");
assertNotNull(jsonArray);
assertEquals(1, jsonArray.size());
boolean foundDisplayName = false;
for(Object suggestion : jsonArray) {
JSONObject jsonSuggestion = (JSONObject)suggestion;
String name = (String)jsonSuggestion.get("name");
if(displayName2.equals(name)) {
foundDisplayName = true;
}
}
assertTrue(foundDisplayName);
}
/**
* Disable/enable status shouldn't affect the search
*/
@Issue("JENKINS-13148")
@Test
public void testDisabledJobShouldBeSearchable() throws Exception {
FreeStyleProject p = j.createFreeStyleProject("foo-bar");
assertTrue(suggest(j.jenkins.getSearchIndex(), "foo").contains(p));
p.disable();
assertTrue(suggest(j.jenkins.getSearchIndex(), "foo").contains(p));
}
/**
* All top-level jobs should be searchable, not just jobs in the current view.
*/
@Issue("JENKINS-13148")
@Test
public void testCompletionOutsideView() throws Exception {
FreeStyleProject p = j.createFreeStyleProject("foo-bar");
ListView v = new ListView("empty1",j.jenkins);
ListView w = new ListView("empty2",j.jenkins);
j.jenkins.addView(v);
j.jenkins.addView(w);
j.jenkins.setPrimaryView(w);
// new view should be empty
assertFalse(v.contains(p));
assertFalse(w.contains(p));
assertFalse(j.jenkins.getPrimaryView().contains(p));
assertTrue(suggest(j.jenkins.getSearchIndex(),"foo").contains(p));
}
@Issue("SECURITY-385")
@Test
public void testInaccessibleViews() throws IOException {
j.jenkins.setSecurityRealm(j.createDummySecurityRealm());
GlobalMatrixAuthorizationStrategy strategy = new GlobalMatrixAuthorizationStrategy();
strategy.add(Jenkins.READ, "alice");
j.jenkins.setAuthorizationStrategy(strategy);
j.jenkins.addView(new ListView("foo", j.jenkins));
// SYSTEM can see all the views
assertEquals("two views exist", 2, Jenkins.getInstance().getViews().size());
List<SearchItem> results = new ArrayList<>();
j.jenkins.getSearchIndex().suggest("foo", results);
assertEquals("nonempty results list", 1, results.size());
// Alice can't
assertFalse("no permission", j.jenkins.getView("foo").hasPermission(User.get("alice").impersonate(), View.READ));
ACL.impersonate(User.get("alice").impersonate(), new Runnable() {
@Override
public void run() {
assertEquals("no visible views", 0, Jenkins.getInstance().getViews().size());
List<SearchItem> results = new ArrayList<>();
j.jenkins.getSearchIndex().suggest("foo", results);
assertEquals("empty results list", Collections.emptyList(), results);
}
});
}
@Test
public void testSearchWithinFolders() throws Exception {
MockFolder folder1 = j.createFolder("folder1");
FreeStyleProject p1 = folder1.createProject(FreeStyleProject.class, "myjob");
MockFolder folder2 = j.createFolder("folder2");
FreeStyleProject p2 = folder2.createProject(FreeStyleProject.class, "myjob");
List<SearchItem> suggest = suggest(j.jenkins.getSearchIndex(), "myjob");
assertTrue(suggest.contains(p1));
assertTrue(suggest.contains(p2));
}
@Test
@Issue("JENKINS-7874")
public void adminOnlyLinksNotShownToRegularUser() {
j.jenkins.setSecurityRealm(j.createDummySecurityRealm());
MockAuthorizationStrategy mas = new MockAuthorizationStrategy();
mas.grant(Jenkins.READ).onRoot().toEveryone();
j.jenkins.setAuthorizationStrategy(mas);
try(ACLContext _ = ACL.as(User.get("alice"))) {
List<SearchItem> results = new ArrayList<>();
j.jenkins.getSearchIndex().find("config", results);
j.jenkins.getSearchIndex().find("manage", results);
j.jenkins.getSearchIndex().find("log", results);
assertEquals("empty results list", 0, results.size());
}
}
private List<SearchItem> suggest(SearchIndex index, String term) {
List<SearchItem> result = new ArrayList<SearchItem>();
index.suggest(term, result);
return result;
}
@Issue("JENKINS-35459")
@Test
public void testProjectNameInAListView() throws Exception {
MockFolder myMockFolder = j.createFolder("folder");
FreeStyleProject freeStyleProject = myMockFolder.createProject(FreeStyleProject.class, "myJob");
ListView listView = new ListView("ListView", j.jenkins);
listView.setRecurse(true);
listView.add(myMockFolder);
listView.add(freeStyleProject);
j.jenkins.addView(listView);
j.jenkins.setPrimaryView(listView);
assertEquals(2, j.jenkins.getPrimaryView().getAllItems().size());
WebClient wc = j.createWebClient();
Page result = wc.goTo("search/suggest?query=" + freeStyleProject.getName(), "application/json");
assertNotNull(result);
j.assertGoodStatus(result);
String content = result.getWebResponse().getContentAsString();
JSONObject jsonContent = (JSONObject)JSONSerializer.toJSON(content);
assertNotNull(jsonContent);
JSONArray jsonArray = jsonContent.getJSONArray("suggestions");
assertNotNull(jsonArray);
assertEquals(2, jsonArray.size());
Page searchResult = wc.goTo("search?q=" + myMockFolder.getName() + "%2F" + freeStyleProject.getName());
assertNotNull(searchResult);
j.assertGoodStatus(searchResult);
URL resultUrl = searchResult.getUrl();
assertTrue(resultUrl.toString().equals(j.getInstance().getRootUrl() + freeStyleProject.getUrl()));
}
}
| |
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.workbench.ht.client.editors.taskdetails;
import java.util.Date;
import com.google.gwtmockito.GwtMockitoTestRunner;
import org.jboss.errai.ui.client.local.spi.TranslationService;
import org.jbpm.workbench.ht.model.events.TaskRefreshedEvent;
import org.jbpm.workbench.ht.model.events.TaskSelectionEvent;
import org.jbpm.workbench.ht.service.TaskService;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.uberfire.mocks.CallerMock;
import org.uberfire.mocks.EventSourceMock;
import static org.mockito.Mockito.*;
import static org.junit.Assert.*;
@RunWith(GwtMockitoTestRunner.class)
public class TaskDetailsPresenterTest {

    private CallerMock<TaskService> callerMock;

    @Mock
    private TaskService taskService;

    @Mock
    private TranslationService translationService;

    @Mock
    private TaskDetailsPresenter.TaskDetailsView viewMock;

    @Mock
    private EventSourceMock<TaskRefreshedEvent> taskRefreshedEvent;

    private TaskDetailsPresenter presenter;

    /** Wires the presenter under test with mocked collaborators. */
    @Before
    public void setup() {
        callerMock = new CallerMock<TaskService>(taskService);
        doNothing().when(taskRefreshedEvent).fire(any(TaskRefreshedEvent.class));
        presenter = new TaskDetailsPresenter(viewMock, callerMock, taskRefreshedEvent);
        presenter.setTranslationService(translationService);
    }

    /** Switching to read-only mode must disable every editable widget once. */
    @Test
    public void disableTaskDetailEditionTest() {
        presenter.setReadOnlyTaskDetail();
        verifyReadOnlyMode(1);
    }

    /** A log-only selection populates the view and locks it for editing. */
    @Test
    public void testSetTaskDetails_isForLog() {
        when(translationService.format(any())).thenReturn("Completed");

        final String taskStatus = "Completed";
        final String owner = "Owner";
        final int taskPriority = 2;

        presenter.onTaskSelectionEvent(
                createTestTaskSelectionEvent(true, taskStatus, "description", owner, new Date(), taskPriority));

        verify(viewMock).setDueDate(any());
        verify(viewMock).setDueDateTime(any());
        verifySetTaskDetails(owner, taskStatus, String.valueOf(taskPriority));
        verifyReadOnlyMode(1);
    }

    /** A regular (non-log) selection populates the view but leaves it editable. */
    @Test
    public void testSetTaskDetails_statusReady() {
        when(translationService.format(any())).thenReturn("Completed");

        final String taskStatus = "Completed";
        final String owner = "Owner";
        final int taskPriority = 2;

        presenter.onTaskSelectionEvent(
                createTestTaskSelectionEvent(false, taskStatus, "description", owner, new Date(), taskPriority));

        verify(viewMock).setDueDate(any());
        verify(viewMock).setDueDateTime(any());
        verifySetTaskDetails(owner, taskStatus, String.valueOf(taskPriority));
        verifyReadOnlyMode(0);
    }

    /** Updating a task delegates to the service and fires a refresh event for that task. */
    @Test
    public void testUpdateDetails() {
        final String serverTemplateId = "serverTemplateId";
        final String containerId = "containerId";
        final Long taskId = 1L;

        presenter.onTaskSelectionEvent(new TaskSelectionEvent(serverTemplateId, containerId, taskId, "task",
                                                              true, false, "description", new Date(),
                                                              "Completed", "actualOwner", 2, 1L, "processId"));

        final String newDescription = "description";
        final Date newDueDate = new Date();
        final int newPriority = 3;
        presenter.updateTask(newDescription, newDueDate, newPriority);

        verify(taskService).updateTask(serverTemplateId, containerId, taskId,
                                       newPriority, newDescription, newDueDate);

        final ArgumentCaptor<TaskRefreshedEvent> captor = ArgumentCaptor.forClass(TaskRefreshedEvent.class);
        verify(taskRefreshedEvent).fire(captor.capture());
        assertEquals(taskId, captor.getValue().getTaskId());
    }

    /** Asserts owner, status and priority were each pushed to the view exactly once. */
    private void verifySetTaskDetails(String actualOwner, String status, String priority) {
        verify(viewMock).setUser(actualOwner);
        verify(viewMock).setTaskStatus(status);
        verify(viewMock).setTaskPriority(priority);
    }

    /** Asserts every editing control was disabled exactly {@code i} times. */
    private void verifyReadOnlyMode(int i) {
        verify(viewMock, times(i)).setTaskDescriptionEnabled(false);
        verify(viewMock, times(i)).setDueDateEnabled(false);
        verify(viewMock, times(i)).setDueDateTimeEnabled(false);
        verify(viewMock, times(i)).setTaskPriorityEnabled(false);
        verify(viewMock, times(i)).setUpdateTaskVisible(false);
    }

    /** Builds a selection event with fixed ids and the given per-test fields. */
    private TaskSelectionEvent createTestTaskSelectionEvent(boolean isForLog, String status, String description,
                                                            String actualOwner, Date expirationTime, int priority) {
        return new TaskSelectionEvent("serverTemplateId", "containerId", 1L, "task", true, isForLog,
                                      description, expirationTime, status, actualOwner, priority, 1L, "processId");
    }
}
| |
package com.djkong.newschecker;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.text.Editable;
import android.text.InputType;
import android.text.TextWatcher;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.EditText;
import android.widget.Switch;
import android.widget.TextView;
import android.widget.Toast;
import com.getbase.floatingactionbutton.FloatingActionButton;
import com.wdullaer.materialdatetimepicker.date.DatePickerDialog;
import com.wdullaer.materialdatetimepicker.time.RadialPickerLayout;
import com.wdullaer.materialdatetimepicker.time.TimePickerDialog;
import java.util.Calendar;
public class NewsCheckerEditActivity extends AppCompatActivity implements
TimePickerDialog.OnTimeSetListener,
DatePickerDialog.OnDateSetListener{
private Toolbar mToolbar;
private EditText mTitleText;
private TextView mDateText, mTimeText, mRepeatText, mRepeatNoText, mRepeatTypeText, mURLText, mTargetStringText;
private FloatingActionButton mFAB1;
private FloatingActionButton mFAB2;
private Switch mRepeatSwitch;
private String mTitle;
private String mTime;
private String mDate;
private String mRepeatNo;
private String mRepeatType;
private String mActive;
private String mRepeat;
private String[] mDateSplit;
private String[] mTimeSplit;
private int mReceivedID;
private int mYear, mMonth, mHour, mMinute, mDay;
private long mRepeatTime;
private Calendar mCalendar;
private NewsChecker mReceivedNewsChecker;
private NewsCheckerDatabase rb;
private AlarmReceiver mAlarmReceiver;
private String mURL;
private String mTargetString;
// Constant Intent String
public static final String EXTRA_REMINDER_ID = "NewsChecker_ID";
// Values for orientation change
private static final String KEY_TITLE = "title_key";
private static final String KEY_TIME = "time_key";
private static final String KEY_DATE = "date_key";
private static final String KEY_REPEAT = "repeat_key";
private static final String KEY_REPEAT_NO = "repeat_no_key";
private static final String KEY_REPEAT_TYPE = "repeat_type_key";
private static final String KEY_ACTIVE = "active_key";
private static final String KEY_URL = "url_key";
private static final String KEY_TARGET_STRING = "target_string_key";
// Constant values in milliseconds
private static final long milMinute = 60000L;
private static final long milHour = 3600000L;
private static final long milDay = 86400000L;
private static final long milWeek = 604800000L;
private static final long milMonth = 2592000000L;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_add_reminder);
// Initialize Views
mToolbar = (Toolbar) findViewById(R.id.toolbar);
mTitleText = (EditText) findViewById(R.id.reminder_title);
mDateText = (TextView) findViewById(R.id.set_date);
mTimeText = (TextView) findViewById(R.id.set_time);
mRepeatText = (TextView) findViewById(R.id.set_repeat);
mRepeatNoText = (TextView) findViewById(R.id.set_repeat_no);
mRepeatTypeText = (TextView) findViewById(R.id.set_repeat_type);
mFAB1 = (FloatingActionButton) findViewById(R.id.starred1);
mFAB2 = (FloatingActionButton) findViewById(R.id.starred2);
mRepeatSwitch = (Switch) findViewById(R.id.repeat_switch);
mURLText = (TextView) findViewById(R.id.set_url);
mTargetStringText = (TextView) findViewById(R.id.set_target_string);
// Setup Toolbar
setSupportActionBar(mToolbar);
getSupportActionBar().setTitle(R.string.title_activity_edit_reminder);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
getSupportActionBar().setHomeButtonEnabled(true);
// Setup NewsChecker Title EditText
mTitleText.addTextChangedListener(new TextWatcher() {
@Override
public void beforeTextChanged(CharSequence s, int start, int count, int after) {
}
@Override
public void onTextChanged(CharSequence s, int start, int before, int count) {
mTitle = s.toString().trim();
mTitleText.setError(null);
}
@Override
public void afterTextChanged(Editable s) {
}
});
mURLText.addTextChangedListener(new TextWatcher() {
@Override
public void beforeTextChanged(CharSequence s, int start, int count, int after) {
}
@Override
public void onTextChanged(CharSequence s, int start, int before, int count) {
mURL = s.toString().trim();
mURLText.setError(null);
}
@Override
public void afterTextChanged(Editable s) {
}
});
mTargetStringText.addTextChangedListener(new TextWatcher() {
@Override
public void beforeTextChanged(CharSequence s, int start, int count, int after) {
}
@Override
public void onTextChanged(CharSequence s, int start, int before, int count) {
mTargetString = s.toString().trim();
mTargetStringText.setError(null);
}
@Override
public void afterTextChanged(Editable s) {
}
});
// Get reminder id from intent
mReceivedID = Integer.parseInt(getIntent().getStringExtra(EXTRA_REMINDER_ID));
// Get reminder using reminder id
rb = new NewsCheckerDatabase(this);
mReceivedNewsChecker = rb.getNewsChecker(mReceivedID);
// Get values from reminder
mTitle = mReceivedNewsChecker.getTitle();
mDate = mReceivedNewsChecker.getDate();
mTime = mReceivedNewsChecker.getTime();
mRepeat = mReceivedNewsChecker.getRepeat();
mRepeatNo = mReceivedNewsChecker.getRepeatNo();
mRepeatType = mReceivedNewsChecker.getRepeatType();
mActive = mReceivedNewsChecker.getActive();
mURL = mReceivedNewsChecker.getURL();
mTargetString = mReceivedNewsChecker.getTargetString();
// Setup TextViews using reminder values
mTitleText.setText(mTitle);
mDateText.setText(mDate);
mTimeText.setText(mTime);
mRepeatNoText.setText(mRepeatNo);
mRepeatTypeText.setText(mRepeatType);
mRepeatText.setText("Every " + mRepeatNo + " " + mRepeatType + "(s)");
mURLText.setText(mURL);
mTargetStringText.setText(mTargetString);
// To recover from saved state on device rotation
if (savedInstanceState != null) {
String savedTitle = savedInstanceState.getString(KEY_TITLE);
mTitleText.setText(savedTitle);
mTitle = savedTitle;
String savedTime = savedInstanceState.getString(KEY_TIME);
mTimeText.setText(savedTime);
mTime = savedTime;
String savedDate = savedInstanceState.getString(KEY_DATE);
mDateText.setText(savedDate);
mDate = savedDate;
String saveRepeat = savedInstanceState.getString(KEY_REPEAT);
mRepeatText.setText(saveRepeat);
mRepeat = saveRepeat;
String savedRepeatNo = savedInstanceState.getString(KEY_REPEAT_NO);
mRepeatNoText.setText(savedRepeatNo);
mRepeatNo = savedRepeatNo;
String savedRepeatType = savedInstanceState.getString(KEY_REPEAT_TYPE);
mRepeatTypeText.setText(savedRepeatType);
mRepeatType = savedRepeatType;
mActive = savedInstanceState.getString(KEY_ACTIVE);
mURL = savedInstanceState.getString(KEY_URL);
mTargetString = savedInstanceState.getString(KEY_TARGET_STRING);
}
// Setup up active buttons
if (mActive.equals("false")) {
mFAB1.setVisibility(View.VISIBLE);
mFAB2.setVisibility(View.GONE);
} else if (mActive.equals("true")) {
mFAB1.setVisibility(View.GONE);
mFAB2.setVisibility(View.VISIBLE);
}
// Setup repeat switch
if (mRepeat.equals("false")) {
mRepeatSwitch.setChecked(false);
mRepeatText.setText(R.string.repeat_off);
} else if (mRepeat.equals("true")) {
mRepeatSwitch.setChecked(true);
}
// Obtain Date and Time details
mCalendar = Calendar.getInstance();
mAlarmReceiver = new AlarmReceiver();
mDateSplit = mDate.split("/");
mTimeSplit = mTime.split(":");
mDay = Integer.parseInt(mDateSplit[0]);
mMonth = Integer.parseInt(mDateSplit[1]);
mYear = Integer.parseInt(mDateSplit[2]);
mHour = Integer.parseInt(mTimeSplit[0]);
mMinute = Integer.parseInt(mTimeSplit[1]);
}
// To save state on device rotation
@Override
protected void onSaveInstanceState (Bundle outState) {
super.onSaveInstanceState(outState);
outState.putCharSequence(KEY_TITLE, mTitleText.getText());
outState.putCharSequence(KEY_TIME, mTimeText.getText());
outState.putCharSequence(KEY_DATE, mDateText.getText());
outState.putCharSequence(KEY_REPEAT, mRepeatText.getText());
outState.putCharSequence(KEY_REPEAT_NO, mRepeatNoText.getText());
outState.putCharSequence(KEY_REPEAT_TYPE, mRepeatTypeText.getText());
outState.putCharSequence(KEY_ACTIVE, mActive);
outState.putCharSequence(KEY_URL, mURLText.getText());
outState.putCharSequence(KEY_TARGET_STRING, mTargetStringText.getText());
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
}
// On clicking Time picker
public void setTime(View v){
Calendar now = Calendar.getInstance();
TimePickerDialog tpd = TimePickerDialog.newInstance(
this,
now.get(Calendar.HOUR_OF_DAY),
now.get(Calendar.MINUTE),
false
);
tpd.setThemeDark(false);
tpd.show(getFragmentManager(), "Timepickerdialog");
}
// On clicking Date picker
public void setDate(View v){
Calendar now = Calendar.getInstance();
DatePickerDialog dpd = DatePickerDialog.newInstance(
this,
now.get(Calendar.YEAR),
now.get(Calendar.MONTH),
now.get(Calendar.DAY_OF_MONTH)
);
dpd.show(getFragmentManager(), "Datepickerdialog");
}
// Obtain time from time picker
@Override
public void onTimeSet(RadialPickerLayout view, int hourOfDay, int minute) {
mHour = hourOfDay;
mMinute = minute;
if (minute < 10) {
mTime = hourOfDay + ":" + "0" + minute;
} else {
mTime = hourOfDay + ":" + minute;
}
mTimeText.setText(mTime);
}
// Obtain date from date picker
@Override
public void onDateSet(DatePickerDialog view, int year, int monthOfYear, int dayOfMonth) {
monthOfYear ++;
mDay = dayOfMonth;
mMonth = monthOfYear;
mYear = year;
mDate = dayOfMonth + "/" + monthOfYear + "/" + year;
mDateText.setText(mDate);
}
// On clicking the active button
public void selectFab1(View v) {
mFAB1 = (FloatingActionButton) findViewById(R.id.starred1);
mFAB1.setVisibility(View.GONE);
mFAB2 = (FloatingActionButton) findViewById(R.id.starred2);
mFAB2.setVisibility(View.VISIBLE);
mActive = "true";
}
// On clicking the inactive button
public void selectFab2(View v) {
mFAB2 = (FloatingActionButton) findViewById(R.id.starred2);
mFAB2.setVisibility(View.GONE);
mFAB1 = (FloatingActionButton) findViewById(R.id.starred1);
mFAB1.setVisibility(View.VISIBLE);
mActive = "false";
}
// On clicking the repeat switch
public void onSwitchRepeat(View view) {
boolean on = ((Switch) view).isChecked();
if (on) {
mRepeat = "true";
mRepeatText.setText("Every " + mRepeatNo + " " + mRepeatType + "(s)");
} else {
mRepeat = "false";
mRepeatText.setText(R.string.repeat_off);
}
}
// On clicking repeat type button
public void selectRepeatType(View v){
final String[] items = new String[5];
items[0] = "Minute";
items[1] = "Hour";
items[2] = "Day";
items[3] = "Week";
items[4] = "Month";
// Create List Dialog
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Select Type");
builder.setItems(items, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int item) {
mRepeatType = items[item];
mRepeatTypeText.setText(mRepeatType);
mRepeatText.setText("Every " + mRepeatNo + " " + mRepeatType + "(s)");
}
});
AlertDialog alert = builder.create();
alert.show();
}
// On clicking repeat interval button
public void setRepeatNo(View v){
AlertDialog.Builder alert = new AlertDialog.Builder(this);
alert.setTitle("Enter Number");
// Create EditText box to input repeat number
final EditText input = new EditText(this);
input.setInputType(InputType.TYPE_CLASS_NUMBER);
alert.setView(input);
alert.setPositiveButton("Ok",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int whichButton) {
if (input.getText().toString().length() == 0) {
mRepeatNo = Integer.toString(1);
mRepeatNoText.setText(mRepeatNo);
mRepeatText.setText("Every " + mRepeatNo + " " + mRepeatType + "(s)");
}
else {
mRepeatNo = input.getText().toString().trim();
mRepeatNoText.setText(mRepeatNo);
mRepeatText.setText("Every " + mRepeatNo + " " + mRepeatType + "(s)");
}
}
});
alert.setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int whichButton) {
// Do nothing
}
});
alert.show();
}
// On clicking the update button
public void updateNewsChecker(){
// Set new values in the reminder
mReceivedNewsChecker.setTitle(mTitle);
mReceivedNewsChecker.setDate(mDate);
mReceivedNewsChecker.setTime(mTime);
mReceivedNewsChecker.setRepeat(mRepeat);
mReceivedNewsChecker.setRepeatNo(mRepeatNo);
mReceivedNewsChecker.setRepeatType(mRepeatType);
mReceivedNewsChecker.setActive(mActive);
mReceivedNewsChecker.setURL(mURL);
mReceivedNewsChecker.setTargetString(mTargetString);
// Update reminder
rb.updateNewsChecker(mReceivedNewsChecker);
// Set up calender for creating the notification
mCalendar.set(Calendar.MONTH, --mMonth);
mCalendar.set(Calendar.YEAR, mYear);
mCalendar.set(Calendar.DAY_OF_MONTH, mDay);
mCalendar.set(Calendar.HOUR_OF_DAY, mHour);
mCalendar.set(Calendar.MINUTE, mMinute);
mCalendar.set(Calendar.SECOND, 0);
// Cancel existing notification of the reminder by using its ID
mAlarmReceiver.cancelAlarm(getApplicationContext(), mReceivedID);
// Check repeat type
if (mRepeatType.equals("Minute")) {
mRepeatTime = Integer.parseInt(mRepeatNo) * milMinute;
} else if (mRepeatType.equals("Hour")) {
mRepeatTime = Integer.parseInt(mRepeatNo) * milHour;
} else if (mRepeatType.equals("Day")) {
mRepeatTime = Integer.parseInt(mRepeatNo) * milDay;
} else if (mRepeatType.equals("Week")) {
mRepeatTime = Integer.parseInt(mRepeatNo) * milWeek;
} else if (mRepeatType.equals("Month")) {
mRepeatTime = Integer.parseInt(mRepeatNo) * milMonth;
}
// Create a new notification
if (mActive.equals("true")) {
if (mRepeat.equals("true")) {
mAlarmReceiver.setRepeatAlarm(getApplicationContext(), mCalendar, mReceivedID, mRepeatTime);
} else if (mRepeat.equals("false")) {
mAlarmReceiver.setAlarm(getApplicationContext(), mCalendar, mReceivedID);
}
}
// Create toast to confirm update
Toast.makeText(getApplicationContext(), "Edited",
Toast.LENGTH_SHORT).show();
onBackPressed();
}
// On pressing the back button: default behavior only (finish this activity).
@Override
public void onBackPressed() {
super.onBackPressed();
}
// Creating the menu: inflate the add/edit-reminder action bar items.
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.menu_add_reminder, menu);
// true = menu should be displayed
return true;
}
// On clicking menu buttons: dispatch the action bar actions (back arrow,
// save, discard). Returns true when the event was consumed.
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
// On clicking the back arrow
// Discard any changes
case android.R.id.home:
onBackPressed();
return true;
// On clicking save reminder button
// Update reminder
case R.id.save_reminder:
// NOTE(review): setText(mTitle) overwrites the title field before validating.
// Presumably mTitle mirrors the EditText via a TextWatcher — confirm,
// otherwise the user's latest edit is silently replaced here.
mTitleText.setText(mTitle);
if (mTitleText.getText().toString().length() == 0)
// Block saving a reminder with an empty title
mTitleText.setError("NewsChecker Title cannot be blank!");
else {
updateNewsChecker();
}
return true;
// On clicking discard reminder button
// Discard any changes
case R.id.discard_reminder:
Toast.makeText(getApplicationContext(), "Changes Discarded",
Toast.LENGTH_SHORT).show();
onBackPressed();
return true;
default:
return super.onOptionsItemSelected(item);
}
}
}
| |
/*
* Copyright (c) 2022, Inversoft Inc., All Rights Reserved
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.primeframework.mvc.util;
import java.beans.Introspector;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
import org.primeframework.mvc.parameter.el.BeanExpressionException;
import org.primeframework.mvc.parameter.el.CollectionExpressionException;
import org.primeframework.mvc.parameter.el.ExpressionException;
import org.primeframework.mvc.parameter.el.ReadExpressionException;
import org.primeframework.mvc.parameter.el.UpdateExpressionException;
/**
* Provides support for reflection, bean properties and field access.
*
* @author Brian Pontarelli
*/
@SuppressWarnings("unchecked")
public class ReflectionUtils {
// Per-class cache of public fields, keyed by field name. WeakHashMap lets
// entries be collected when the key class is unloaded. Guarded by
// synchronizing on the map itself (see findFields).
private static final Map<Class<?>, Map<String, Field>> fieldCache = new WeakHashMap<>();
// Per-class cache of the sorted public methods (see findMethods). Guarded by
// synchronizing on the map itself.
private static final Map<Class<?>, Method[]> methods = new WeakHashMap<>();
// Cache of resolved Package objects by package name (see findPackageWithAnnotation).
private static final Map<String, Package> packageCache = new WeakHashMap<>();
// Per-class cache of JavaBean property metadata (see findPropertyInfo).
private static final Map<Class<?>, Map<String, PropertyInfo>> propertyCache = new WeakHashMap<>();
// Extractors keyed by accessor prefix ("is"/"get"/"set"); populated in the
// static initializer at the bottom of this class.
private static final Map<String, MethodInformationExtractor> verifiers = new HashMap<>();
/**
 * Determines whether the field carries at least one of the given annotations.
 *
 * @param field The field to inspect.
 * @param annotations The annotation types to look for.
 * @return true if any of the provided annotations are present on the field.
 */
public static boolean areAnyAnnotationsPresent(Field field, List<Class<? extends Annotation>> annotations) {
  return annotations.stream().anyMatch(field::isAnnotationPresent);
}
/**
 * Collects every public field of the given class that carries the given annotation.
 *
 * @param type The class to find fields from.
 * @param annotation The annotation type to look for.
 * @return The annotated fields; never null.
 */
public static List<Field> findAllFieldsWithAnnotation(Class<?> type, Class<? extends Annotation> annotation) {
  List<Field> annotated = new ArrayList<>();
  for (Field candidate : findFields(type).values()) {
    if (candidate.isAnnotationPresent(annotation)) {
      annotated.add(candidate);
    }
  }
  return annotated;
}
/**
 * Collects every public field of the given class that carries at least one of the given annotations.
 *
 * @param type The class to find fields from.
 * @param annotations The annotation types to look for.
 * @return The fields carrying at least one of the annotations; never null.
 */
public static List<Field> findAllFieldsWithAnnotations(Class<?> type, List<Class<? extends Annotation>> annotations) {
  List<Field> fieldList = new ArrayList<>();
  for (Field field : findFields(type).values()) {
    // Delegate to the shared predicate so the "any annotation present" check
    // lives in exactly one place instead of being duplicated here.
    if (areAnyAnnotationsPresent(field, annotations)) {
      fieldList.add(field);
    }
  }
  return fieldList;
}
/**
 * Returns the names of all the public fields and JavaBean properties of the given class.
 *
 * @param type The Class to pull the names from.
 * @return The combined set of field names and property names.
 */
public static Set<String> findAllMembers(Class<?> type) {
  // Sets ignore duplicate adds, so a name that is both a field and a
  // property appears once.
  Set<String> members = new HashSet<>(findFields(type).keySet());
  members.addAll(findPropertyInfo(type).keySet());
  return members;
}
/**
 * Locates all of the members (fields and JavaBean properties) that have the given annotation and returns a map from
 * the member name to the annotation instance. For a property, the annotation may appear on any of its accessor
 * methods; the first accessor found with the annotation wins.
 *
 * @param type The class to find the member annotations from.
 * @param annotation The annotation type.
 * @param <T> The annotation type.
 * @return A map of member names to annotations; never null.
 */
public static <T extends Annotation> Map<String, T> findAllMembersWithAnnotation(Class<?> type, Class<T> annotation) {
  Map<String, T> result = new HashMap<>();
  for (Field field : findAllFieldsWithAnnotation(type, annotation)) {
    result.put(field.getName(), field.getAnnotation(annotation));
  }
  for (Map.Entry<String, PropertyInfo> entry : findPropertyInfo(type).entrySet()) {
    for (Method accessor : entry.getValue().getMethods().values()) {
      T found = accessor.getAnnotation(annotation);
      if (found != null) {
        result.put(entry.getKey(), found);
        break;
      }
    }
  }
  return result;
}
/**
 * Collects every public method of the given class that carries the given annotation.
 *
 * @param type The class to find methods from.
 * @param annotation The annotation type to look for.
 * @return The annotated methods; never null.
 */
public static List<Method> findAllMethodsWithAnnotation(Class<?> type, Class<? extends Annotation> annotation) {
  List<Method> annotated = new ArrayList<>();
  for (Method candidate : findMethods(type)) {
    if (candidate.isAnnotationPresent(annotation)) {
      annotated.add(candidate);
    }
  }
  return annotated;
}
/**
 * Loads or fetches from the cache a Map of {@link Field} objects keyed into the Map by the field name they correspond
 * to. Only public fields (per {@link Class#getFields()}) are included.
 *
 * @param type The class to grab the fields from.
 * @return An unmodifiable Map from field name to Field; empty if the class has no public fields.
 */
public static Map<String, Field> findFields(Class<?> type) {
  synchronized (fieldCache) {
    Map<String, Field> cached = fieldCache.get(type);
    if (cached != null) {
      return cached;
    }
    Map<String, Field> fieldMap = new HashMap<>();
    for (Field field : type.getFields()) {
      fieldMap.put(field.getName(), field);
    }
    // Cache and return the SAME unmodifiable view. The previous version
    // returned the mutable backing map on the first call while caching an
    // unmodifiable wrapper over it, so a caller mutating the first result
    // would silently corrupt the cached view for everyone else.
    Map<String, Field> view = Collections.unmodifiableMap(fieldMap);
    fieldCache.put(type, view);
    return view;
  }
}
/**
 * Loads and caches the methods of the given Class in an ordered array. Methods defined in superclasses come first,
 * followed by methods in the given type; the deeper the superclass, the earlier its methods appear.
 *
 * @param type The class.
 * @return The sorted methods.
 */
public static Method[] findMethods(final Class<?> type) {
  synchronized (methods) {
    Method[] cached = methods.get(type);
    if (cached != null) {
      return cached;
    }
    // Sort by declaration depth (see SortableMethod), then cache the result.
    Method[] sorted = Arrays.stream(type.getMethods())
        .map(m -> new SortableMethod(m, type))
        .sorted()
        .map(sm -> sm.method)
        .toArray(Method[]::new);
    methods.put(type, sorted);
    return sorted;
  }
}
/**
* Find a package by name and return it if it has the requested annotation.
*
* @param packageName the string name of the package.
* @param annotation the annotation to find in that package.
* @return the package if it exists and has the requested annotation or null.
*/
public static Package findPackageWithAnnotation(String packageName, Class<? extends Annotation> annotation) {
Package pkg;
synchronized (packageCache) {
pkg = packageCache.get(packageName);
if (pkg == null) {
// NOTE(review): Package.getPackage is deprecated since Java 9 in favor of
// ClassLoader.getDefinedPackage — confirm the project's target JDK before
// migrating. Null results are deliberately not cached, so unknown packages
// are re-resolved on every call.
pkg = Package.getPackage(packageName);
if (pkg != null) {
packageCache.put(packageName, pkg);
}
}
}
// The annotation check happens outside the lock; Package is immutable here.
return pkg != null && pkg.isAnnotationPresent(annotation) ? pkg : null;
}
/**
* Loads or fetches from the cache a Map of {@link PropertyInfo} objects keyed into the Map by the property name they
* correspond to. On the first call for a class, all public methods are scanned for JavaBean accessor shapes and the
* resulting property set is validated; validation failures are collected and thrown together.
*
* @param type The class to grab the property map from.
* @return The Map, which could be empty if the class has no properties.
* @throws BeanExpressionException if the class declares an invalid or ambiguous JavaBean property.
*/
public static Map<String, PropertyInfo> findPropertyInfo(Class<?> type) {
Map<String, PropertyInfo> propMap;
synchronized (propertyCache) {
// Otherwise look for the property Map or create and store
propMap = propertyCache.get(type);
if (propMap != null) {
return propMap;
}
propMap = new HashMap<>();
Set<String> errors = new HashSet<>();
Method[] methods = findMethods(type);
for (Method method : methods) {
// Skip bridge methods (covariant or generics) because the non-bridge method is the one that should be correct
if (method.isBridge()) {
continue;
}
// Split the method name into prefix + property name; null means the name
// does not look like an accessor (e.g. no upper-case boundary).
PropertyName name = getPropertyNames(method);
if (name == null) {
continue;
}
PropertyInfo info = propMap.get(name.getName());
boolean constructed = false;
if (info == null) {
info = new PropertyInfo();
info.setName(name.getName());
info.setDeclaringClass(method.getDeclaringClass());
constructed = true;
}
// Unify get and is
String prefix = name.getPrefix();
if (prefix.equals("is")) {
prefix = "get";
}
// Two methods mapping to the same prefix slot is ambiguous.
Method existingMethod = info.getMethods().get(prefix);
if (existingMethod != null) {
errors.add("Two or more [" + prefix + "] methods exist in the class [" + type + "] and Prime can't determine which to call");
continue;
}
// Prefixes other than is/get/set (e.g. "handle") have no extractor and are ignored.
MethodInformationExtractor verifier = verifiers.get(prefix);
if (verifier == null) {
continue;
}
info.getMethods().put(prefix, method);
info.setGenericType(verifier.determineGenericType(method));
info.setType(verifier.determineType(method));
info.setIndexed(verifier.isIndexed(method));
if (constructed) {
propMap.put(name.getName(), info);
}
}
// Check for property errors: reject mixes of indexed and plain accessors,
// malformed accessors, and getter/setter type mismatches.
for (PropertyInfo info : propMap.values()) {
Method read = info.getMethods().get("get");
Method write = info.getMethods().get("set");
if (read != null && isValidGetter(read)) {
if (info.isIndexed()) {
errors.add("Invalid property named [" + info.getName() + "]. It mixes indexed and normal JavaBean methods.");
}
} else if (read != null && isValidIndexedGetter(read)) {
if (!info.isIndexed() && write != null) {
errors.add("Invalid property named [" + info.getName() + "]. It mixes indexed and normal JavaBean methods.");
}
} else if (read != null) {
errors.add("Invalid getter method for property named [" + info.getName() + "]");
}
if (write != null && isValidSetter(write)) {
if (info.isIndexed()) {
errors.add("Invalid property named [" + info.getName() + "]. It mixes indexed and normal JavaBean methods.");
}
} else if (write != null && isValidIndexedSetter(write)) {
if (!info.isIndexed() && read != null) {
errors.add("Invalid property named [" + info.getName() + "]. It mixes indexed and normal JavaBean methods.");
}
} else if (write != null) {
errors.add("Invalid setter method for property named [" + info.getName() + "]");
}
// For indexed properties the value parameter is at index 1 (after the index).
if (read != null && write != null &&
((info.isIndexed() && read.getReturnType() != write.getParameterTypes()[1]) ||
(!info.isIndexed() && read.getReturnType() != write.getParameterTypes()[0]))) {
errors.add("Invalid getter/setter pair for JavaBean property named [" + info.getName() + "] in class [" +
write.getDeclaringClass() + "]. The return type and parameter types must be identical");
}
}
if (errors.size() > 0) {
throw new BeanExpressionException("Invalid JavaBean class [" + type + "]. Errors are: \n" + errors);
}
propertyCache.put(type, Collections.unmodifiableMap(propMap));
}
return propMap;
}
/**
 * Reads the value of the given field from the given object via reflection.
 *
 * @param field The field to get.
 * @param object The object to get the field from.
 * @return The value of the field.
 * @throws ExpressionException wrapping any reflection failure encountered while reading the field.
 */
public static Object getField(Field field, Object object) throws ExpressionException {
  try {
    return field.get(object);
  } catch (IllegalAccessException e) {
    throw new ReadExpressionException("Illegal access for field [" + field + "]", e);
  } catch (IllegalArgumentException e) {
    throw new ReadExpressionException("Illegal argument for field [" + field + "]", e);
  }
}
/**
 * Determines the type of the given member (field or property). Fields take precedence over properties of the same
 * name.
 *
 * @param type The class.
 * @param member The member name.
 * @return The member's type, or null if no such field or property exists.
 */
public static Class<?> getMemberType(Class<?> type, String member) {
  Field field = findFields(type).get(member);
  if (field != null) {
    return field.getType();
  }
  PropertyInfo info = findPropertyInfo(type).get(member);
  return info == null ? null : info.getType();
}
/**
 * Invokes the given method on the given object, unwrapping reflection exceptions: a RuntimeException or Error thrown
 * by the target method propagates as-is; everything else is wrapped in an ExpressionException.
 *
 * @param method The method to invoke.
 * @param obj The object to invoke the method on.
 * @param params The parameters passed to the method.
 * @return The return value of the method invocation.
 */
public static <T> T invoke(Method method, Object obj, Object... params) {
  try {
    return (T) method.invoke(obj, params);
  } catch (IllegalAccessException e) {
    throw new ExpressionException("Unable to call method [" + method + "] because it isn't accessible", e);
  } catch (IllegalArgumentException e) {
    throw new ExpressionException("Unable to call method [" + method + "] because the incorrect parameters were passed to it", e);
  } catch (InvocationTargetException e) {
    // Re-throw unchecked throwables from the target untouched.
    Throwable cause = e.getTargetException();
    if (cause instanceof RuntimeException) {
      throw (RuntimeException) cause;
    } else if (cause instanceof Error) {
      throw (Error) cause;
    }
    throw new ExpressionException("Unable to call method [" + method + "]", e);
  }
}
/**
 * Invokes all the given no-argument methods on the given object.
 *
 * @param obj The object to invoke the methods on.
 * @param methods The methods to invoke.
 */
public static void invokeAll(Object obj, List<Method> methods) {
  for (Method method : methods) {
    // Delegate to invoke() so exception handling is consistent: the previous
    // inline version wrapped Errors thrown by the target in an
    // ExpressionException (invoke() rethrows them) and let
    // IllegalArgumentException escape unwrapped.
    invoke(method, obj);
  }
}
/**
* This handles invoking the getter method. Thin delegate to {@link #invoke(Method, Object, Object...)} with no
* arguments; see that method for the exception unwrapping rules.
*
* @param method The method to invoke.
* @param object The object to invoke the method on.
* @return The return value of the method.
* @throws RuntimeException If the target of the InvocationTargetException is a RuntimeException, in which case, it is
* re-thrown.
* @throws Error If the target of the InvocationTargetException is an Error, in which case, it is
* re-thrown.
*/
public static Object invokeGetter(Method method, Object object) throws RuntimeException, Error {
return invoke(method, object);
}
/**
 * Invokes the setter method, with one special case: when the setter takes a single non-collection parameter and the
 * value is a Collection containing exactly one element, that element is unwrapped and passed instead.
 *
 * @param method The method to invoke.
 * @param object The object to invoke the method on.
 * @param value The value to set into the method.
 * @throws RuntimeException If the target of the InvocationTargetException is a RuntimeException, in which case, it is
 *                          re-thrown.
 * @throws Error If the target of the InvocationTargetException is an Error, in which case, it is re-thrown.
 */
public static void invokeSetter(Method method, Object object, Object value) throws RuntimeException, Error {
  Class<?>[] types = method.getParameterTypes();
  if (types.length != 1) {
    throw new UpdateExpressionException("Invalid method [" + method + "] it should take a single parameter");
  }

  Class<?> type = types[0];
  if (!type.isInstance(value) && value instanceof Collection) {
    // Handle the Collection special case: unwrap a singleton collection.
    Collection<?> collection = (Collection<?>) value;
    if (collection.size() != 1) {
      throw new ExpressionException("Cannot set a Collection that contains multiple values into the method [" +
          method + "] which is not a collection.");
    }
    value = collection.iterator().next();
  }

  invoke(method, object, value);
}
/**
* Check if the method has the shape of a plain JavaBean getter: no parameters and a non-void return type. Note that
* the get/is prefix itself is not checked here — prefix handling happens in getPropertyNames/findPropertyInfo.
*
* @param method The method to check.
* @return True if valid, false otherwise.
*/
public static boolean isValidGetter(Method method) {
return (method.getParameterTypes().length == 0 && method.getReturnType() != Void.TYPE);
}
/**
* Check if the method has the shape of an indexed JavaBean getter: exactly one parameter (the index) and a non-void
* return type. The parameter's type and the get/is prefix are not checked here.
*
* @param method The method to check.
* @return True if valid, false otherwise.
*/
public static boolean isValidIndexedGetter(Method method) {
return (method.getParameterTypes().length == 1 && method.getReturnType() != Void.TYPE);
}
/**
* Check if the method has the shape of an indexed JavaBean setter: exactly two parameters (an index and a value).
* The parameter types and the set prefix are not checked here.
*
* @param method The method to check.
* @return True if valid, false otherwise.
*/
public static boolean isValidIndexedSetter(Method method) {
return (method.getParameterTypes().length == 2);
}
/**
* Check if the method has the shape of a plain JavaBean setter: exactly one parameter. The return type and the set
* prefix are not checked here.
*
* @param method The method to check.
* @return True if valid, false otherwise.
*/
public static boolean isValidSetter(Method method) {
return (method.getParameterTypes().length == 1);
}
/**
* This handles setting a value on a field and also will handle a single special case where the setter method takes a
* single object and the value is a collection with a single value. Additionally, when the field is a Collection and
* the value is an array or Collection, the field's existing collection is cleared and refilled in place (or a new
* instance of the field's type is created when the field is null).
*
* @param field The field to set.
* @param object The object to set the field on.
* @param value The value to set into the field.
* @throws ExpressionException If any mishap occurred whilst Reflecting sire. All the exceptions that could be thrown
* whilst invoking will be wrapped inside the ReflectionException.
*/
@SuppressWarnings("rawtypes")
public static void setField(Field field, Object object, Object value) throws ExpressionException {
Class<?> fieldType = field.getType();
boolean valueIsArray = value != null && value.getClass().isArray();
boolean fieldIsCollection = Collection.class.isAssignableFrom(field.getType());
boolean valueIsCollection = value != null && Collection.class.isAssignableFrom(value.getClass());
// If the fieldType is not an instance of the value, and the value IS a collection, but the field IS NOT and
// the collection is of size 1, we can handle this by retrieving the first value from the collection.
if (!fieldType.isInstance(value) && (valueIsCollection && !fieldIsCollection)) {
// Handle the Collection special case
Collection c = (Collection) value;
if (c.size() == 1) {
value = c.iterator().next();
} else {
throw new CollectionExpressionException("Cannot set a Collection that contains multiple values into the field [" +
field + "] which is not a collection.");
}
}
// If the field is final, or not the same type, we are eligible for the "brute force collection coercion technique"
// also referred to as the "BFCCT", patent pending. Check if we are eligible and if the field and the value are both
// collections more or less.
boolean coercionEligible = Modifier.isFinal(field.getModifiers()) || !fieldType.isInstance(value);
boolean fieldAndValueAreCollectionCompatible = fieldIsCollection && (valueIsArray || valueIsCollection);
try {
if (coercionEligible && fieldAndValueAreCollectionCompatible) {
// Mutate the existing collection in place so final fields can still be "set".
Collection collection = (Collection) field.get(object);
// Non final fields may be null
if (collection == null) {
// NOTE(review): Class.newInstance is deprecated since Java 9 (it propagates
// checked exceptions from the constructor unchecked) — consider
// getDeclaredConstructor().newInstance() when the target JDK allows.
collection = (Collection) fieldType.newInstance();
} else {
collection.clear();
}
if (value.getClass().isArray()) {
collection.addAll(Arrays.asList((Object[]) value));
} else {
collection.addAll((Collection) value);
}
} else {
field.set(object, value);
}
} catch (IllegalAccessException e) {
throw new UpdateExpressionException("Illegal access for field [" + field + "]", e);
} catch (IllegalArgumentException e) {
throw new UpdateExpressionException("Illegal argument for field [" + field + "]", e);
} catch (InstantiationException e) {
throw new UpdateExpressionException("Instantiation exception for field [" + field + "]", e);
}
}
/**
* Recursively collects every interface implemented/extended by the given type's direct interfaces. Note: this does
* not walk the superclass chain — only the interface hierarchy of {@code type} itself.
*
* @param type The type whose interface closure is collected.
* @return The set of all reachable interfaces.
*/
private static Set<Class<?>> allInterfaces(Class<?> type) {
Class<?>[] interfaces = type.getInterfaces();
Set<Class<?>> set = new HashSet<>(Arrays.asList(interfaces));
for (Class<?> anInterface : interfaces) {
set.addAll(allInterfaces(anInterface));
}
return set;
}
/**
* Returns true when the given method is a default method declared on an interface that the target class (directly or
* transitively via its interface hierarchy) implements. Non-default methods always return false.
*
* @param method The method to test.
* @param target The class whose interfaces are searched.
* @return True if the default method's declaring interface is implemented by the target.
*/
private static boolean declaredOnImplementedInterface(Method method, Class<?> target) {
if (!method.isDefault()) {
return false;
}
Class<?> declaringType = method.getDeclaringClass();
Set<Class<?>> interfaces = allInterfaces(target);
return interfaces.contains(declaringType);
}
/**
 * Using the given Method, it returns the name of the java bean property and the prefix of the method.
 * <p/>
 * <h3>Examples:</h3>
 * <p/>
 * <pre>
 * getFoo -> get, foo
 * getX -> get, x
 * getURL -> get, URL
 * handleBar -> handle, bar
 * </pre>
 *
 * @param method The method to translate.
 * @return The property names or null if this was not a valid property Method.
 */
private static PropertyName getPropertyNames(Method method) {
  String name = method.getName();

  // A property method needs a lower-case prefix followed by an upper-case
  // boundary character (getFoo, isX, handleBar, ...).
  if (name.isEmpty() || Character.isUpperCase(name.charAt(0))) {
    return null;
  }

  int boundary = -1;
  for (int i = 1; i < name.length(); i++) {
    if (Character.isUpperCase(name.charAt(i))) {
      boundary = i;
      break;
    }
  }

  if (boundary == -1) {
    return null;
  }

  // decapitalize keeps all-caps runs intact (e.g. "URL" stays "URL").
  String prefix = name.substring(0, boundary);
  String propertyName = Introspector.decapitalize(name.substring(boundary));
  return new PropertyName(prefix, propertyName);
}
/**
* This interface defines a mechanism to extract information from JavaBean properties.
*
* @author Brian Pontarelli
*/
public interface MethodInformationExtractor {
/**
* Determines the generic type of the property.
*
* @param method The method to pull the generic type from.
* @return The generic type.
*/
Type determineGenericType(Method method);
/**
* Determines the type of the method. For getters this is the return type. For setters this is the parameter.
*
* @param method The method.
* @return The property's type.
*/
Class<?> determineType(Method method);
/**
* Whether or not this property is an indexed property.
*
* @param method The method to determine if it is indexed.
* @return True if indexed or false otherwise.
*/
boolean isIndexed(Method method);
}
/**
* This class extracts information about JavaBean standard getter methods. The forms of the methods are as follows:
* <p/>
* <h3>Indexed methods</h3>
* <p/>
* <h4>Retrieval</h4>
* <p/>
* <pre>
* public Object getFoo(int index)
* public boolean isFoo(int index)
* </pre>
* <p/>
* <h3>Normal methods</h3>
* <p/>
* <h4>Retrieval</h4>
* <p/>
* <pre>
* public Object getFoo()
* public boolean isFoo()
* </pre>
* <p/>
* All <b>is</b> methods must have a return type of boolean regardless of being indexed or not.
*
* @author Brian Pontarelli
*/
public static class GetMethodInformationExtractor implements MethodInformationExtractor {
/**
* @param method The method to get the generic type from.
* @return Returns the return type of the method.
*/
@Override
public Type determineGenericType(Method method) {
return method.getGenericReturnType();
}
// For a getter, the property's type is simply the return type.
@Override
public Class<?> determineType(Method method) {
return method.getReturnType();
}
// A getter with one (index) parameter and a non-void return is indexed.
@Override
public boolean isIndexed(Method method) {
return isValidIndexedGetter(method);
}
}
/**
 * This class is a small helper class that is used to store the read and write methods of a bean property as well as a
 * flag that determines if it is indexed.
 *
 * @author Brian Pontarelli
 */
public static class PropertyInfo {
  // Accessor methods keyed by unified prefix ("get" or "set"); "is" getters
  // are stored under "get" (see findPropertyInfo).
  private final Map<String, Method> methods = new HashMap<>();

  private Class<?> declaringClass;

  private Type genericType;

  private boolean indexed;

  private String name;

  private Class<?> type;

  /** @return The class that declared this property's first-seen accessor. */
  public Class<?> getDeclaringClass() {
    return declaringClass;
  }

  public void setDeclaringClass(Class<?> declaringClass) {
    this.declaringClass = declaringClass;
  }

  /** @return The property's generic type (return type for getters, parameter type for setters). */
  public Type getGenericType() {
    return genericType;
  }

  public void setGenericType(Type genericType) {
    this.genericType = genericType;
  }

  /** @return The live (mutable) map of accessor methods keyed by prefix. */
  public Map<String, Method> getMethods() {
    return methods;
  }

  /** @return The JavaBean property name (decapitalized). */
  public String getName() {
    return name;
  }

  public void setName(String name) {
    this.name = name;
  }

  /** @return The property's raw type. */
  public Class<?> getType() {
    return type;
  }

  public void setType(Class<?> type) {
    this.type = type;
  }

  /** @return True when the property uses indexed accessors (getFoo(int) / setFoo(int, value)). */
  public boolean isIndexed() {
    return indexed;
  }

  public void setIndexed(boolean indexed) {
    this.indexed = indexed;
  }

  // Added the missing @Override so the compiler verifies this actually
  // overrides Object.toString.
  @Override
  public String toString() {
    return "Property named [" + name + "] in class [" + declaringClass + "]";
  }
}
/**
* This class stores the information about JavaBean methods including the prefix and propertyName. Instances are
* immutable value holders produced by getPropertyNames.
*
* @author Brian Pontarelli
*/
public static class PropertyName {
// The decapitalized property name (e.g. "foo" for getFoo).
private final String name;
// The accessor prefix (e.g. "get", "is", "set", or any other lower-case lead-in).
private final String prefix;
public PropertyName(String prefix, String name) {
this.prefix = prefix;
this.name = name;
}
public String getName() {
return name;
}
public String getPrefix() {
return prefix;
}
}
/**
* This class extracts information from JavaBean standard setter methods. The forms of the methods are as follows:
* <p/>
* <h3>Indexed methods</h3>
* <p/>
* <h4>Store</h4>
* <p/>
* <pre>
* public void setFoo(int index, Object obj)
* public void setBool(int index, boolean bool)
* </pre>
* <h3>Normal methods</h3>
* <p/>
* <h4>Storage</h4>
* <p/>
* <pre>
* public void setFoo(Object o)
* </pre>
*
* @author Brian Pontarelli
*/
public static class SetMethodInformationExtractor implements MethodInformationExtractor {
// For a setter, the generic type comes from the value parameter: index 0 for
// plain setters, index 1 for indexed setters (after the index parameter).
@Override
public Type determineGenericType(Method method) {
Type[] types = method.getGenericParameterTypes();
if (types.length == 0) {
throw new ExpressionException("Unable to call method [" + method + "] because while the name indicates it is a setter, the method has 0 arguments. You need to add 1 or more arguments, or rename this method.");
}
if (types.length == 1) {
return types[0];
}
return types[1];
}
// Same value-parameter selection as above, on the raw parameter types.
@Override
public Class<?> determineType(Method method) {
return isIndexed(method) ? method.getParameterTypes()[1] : method.getParameterTypes()[0];
}
// A setter with exactly two parameters (index + value) is indexed.
@Override
public boolean isIndexed(Method method) {
return isValidIndexedSetter(method);
}
}
/**
 * Wraps a Method with its declaration "depth" relative to a target type so methods can be ordered superclass-first
 * (deeper superclasses sort earlier), then by method name.
 */
public static class SortableMethod implements Comparable<SortableMethod> {
  private final int depth;

  private final Method method;

  public SortableMethod(Method method, Class<?> targetType) {
    this.method = method;
    int d = 0;
    Class<?> declaringType = method.getDeclaringClass();
    // Walk up the superclass chain until we reach the declaring class (or the
    // interface that declares this default method). The null check now comes
    // FIRST so declaredOnImplementedInterface is never called with a null
    // target once we walk past Object.
    while (targetType != null && declaringType != targetType && !declaredOnImplementedInterface(method, targetType)) {
      targetType = targetType.getSuperclass();
      d++;
    }
    this.depth = d;
  }

  @Override
  public int compareTo(SortableMethod o) {
    if (depth == o.depth) {
      return method.getName().compareTo(o.method.getName());
    }
    // Integer.compare instead of subtraction: same ordering (deeper first),
    // without the theoretical int-overflow of "o.depth - depth".
    return Integer.compare(o.depth, depth);
  }
}
// Register the extractors for the three JavaBean accessor prefixes. "is" and
// "get" share the getter extractor; findPropertyInfo later unifies "is" onto
// the "get" slot.
static {
verifiers.put("is", new GetMethodInformationExtractor());
verifiers.put("get", new GetMethodInformationExtractor());
verifiers.put("set", new SetMethodInformationExtractor());
}
}
| |
package apollo.analysis.filter;
import java.util.*;
/**
* This contains all of the filtering parameters to use
* on a set of raw output from a computational analysis of
* the sequence.
**/
public class AnalysisInput {
// Sentinel meaning "no constraint" for the numeric limits below.
public static int NO_LIMIT = -1;
// Display/classification of the analysis results.
protected String tier;
protected String display_type;
protected String analysis_type;
// Master switch: when false, filtering is skipped entirely.
protected boolean filter = true;
// Numeric thresholds; NO_LIMIT (-1) disables each one individually.
protected int min_score = NO_LIMIT;
protected int min_identity = NO_LIMIT;
protected int min_length = NO_LIMIT;
protected int wordsize = NO_LIMIT;
protected int max_ratio = 15;
protected double max_expect = NO_LIMIT;
protected int max_cover = -1;
protected int max_exons = -1;
protected int max_gap = -1;
protected int coincidence = NO_LIMIT;
protected int offset = 0;
// Result post-processing toggles.
protected boolean remove_twilight = false;
protected boolean remove_shadows = false;
protected boolean distinctHSPs = false;
protected boolean split_frames = false;
protected boolean split_dups = false;
protected boolean use_percentage = true;
protected boolean join_EST_ends = false;
protected boolean trim_polyA = false;
protected String revcomp_3prime = null;
// Debugging controls.
protected String debug_str = null;
protected boolean debug = false;
// Provenance of the analysis run.
protected Date run_date;
protected String database = "";
protected boolean collapse = false;
protected String autopromote = null;
protected boolean keep_polyApredict = false;
protected boolean keep_promoter = false;
/** The filter panel isn't necessarily the right place to put this checkbox,
* but it was the easiest way to implement it. */
protected boolean queryIsGenomic = false;
// No-arg constructor; all parameters start at their field defaults.
public AnalysisInput () {
}
// Display tier for the analysis results.
public void setTier (String tier) {
this.tier = tier;
}
public String getTier () {
return tier;
}
// Display type of the results.
public void setType (String type) {
this.display_type = type;
}
public String getType () {
return display_type;
}
// The kind of analysis that produced the results.
public void setAnalysisType (String type) {
this.analysis_type = type;
}
public String getAnalysisType () {
return analysis_type;
}
// Overloaded pair: no-arg reads the master filter switch, one-arg sets it.
public boolean runFilter () {
return filter;
}
public void runFilter (boolean filter) {
this.filter = filter;
}
/** Parses and applies the maximum ratio; a parse failure is reported and otherwise ignored. */
public void setMaxRatio(String maxratioStr) {
  try {
    setMaxRatio(Integer.parseInt(maxratioStr));
  } catch (Exception e) {
    System.out.println("Unable to parse max_ratio from " + maxratioStr +
        " " + e.getMessage());
    e.printStackTrace();
  }
}

/** Applies the maximum ratio as-is (no validation, unlike the other limits). */
public void setMaxRatio(int max_ratio) {
  this.max_ratio = max_ratio;
}

public int getMaxRatio() {
  return max_ratio;
}

/** Parses and applies the word size; a parse failure is reported and otherwise ignored. */
public void setWordSize(String wordsizeStr) {
  try {
    setWordSize(Integer.parseInt(wordsizeStr));
  } catch (Exception e) {
    System.out.println("Unable to parse wordsize from " + wordsizeStr +
        " " + e.getMessage());
    e.printStackTrace();
  }
}

/** Applies the word size; non-positive values disable the limit. */
public void setWordSize(int wordsize) {
  this.wordsize = wordsize > 0 ? wordsize : NO_LIMIT;
}

public int getWordSize() {
  return wordsize;
}

/** Low-content removal is active exactly when a word size has been set. */
public boolean removeLowContent() {
  return wordsize != NO_LIMIT;
}
/** Parses and applies the maximum expectation value; a parse failure is reported and otherwise ignored. */
public void setMaxExpect(String max_expectStr) {
  try {
    setMaxExpect(Double.parseDouble(max_expectStr));
  } catch (Exception e) {
    System.out.println("Unable to parse max_expect from " + max_expectStr +
        " " + e.getMessage());
    e.printStackTrace();
  }
}

/** Applies the maximum expectation; values outside [0, 1] disable the limit. */
public void setMaxExpect(double max_expect) {
  this.max_expect = (max_expect >= 0 && max_expect <= 1) ? max_expect : NO_LIMIT;
}

public double getMaxExpect() {
  return max_expect;
}

/** The expectation cutoff is active exactly when a value in [0, 1] has been set. */
public boolean useExpect() {
  return max_expect != NO_LIMIT;
}

/** Parses and applies the maximum coverage; a parse failure is reported and otherwise ignored. */
public void setMaxCover(String max_coverStr) {
  try {
    setMaxCover(Integer.parseInt(max_coverStr));
  } catch (Exception e) {
    System.out.println("Unable to parse max_cover from " + max_coverStr +
        " " + e.getMessage());
    e.printStackTrace();
  }
}

/** Applies the maximum coverage; non-positive values disable the limit. */
public void setMaxCover(int max_cover) {
  this.max_cover = max_cover > 0 ? max_cover : NO_LIMIT;
}

public int getMaxCover() {
  return max_cover;
}

/** Coverage limiting is active exactly when a positive maximum has been set. */
public boolean limitCoverage() {
  return max_cover != NO_LIMIT;
}
public void setMaxExons (String max_exonsStr) {
try {
setMaxExons (Integer.parseInt (max_exonsStr));
}
catch (Exception e) {
System.out.println ("Unable to parse max_exons from " + max_exonsStr +
" " + e.getMessage());
e.printStackTrace();
}
}
public void setMaxExons (int max_exons) {
if (max_exons > 0)
this.max_exons = max_exons;
else
this.max_exons = NO_LIMIT;
}
public int getMaxExons () {
return max_exons;
}
public boolean limitMaxExons () {
return max_exons != NO_LIMIT;
}
public void setMaxAlignGap (String max_gapStr) {
try {
setMaxAlignGap (Integer.parseInt (max_gapStr));
}
catch (Exception e) {
System.out.println ("Unable to parse max_gap from " + max_gapStr +
" " + e.getMessage());
e.printStackTrace();
}
}
public void setMaxAlignGap (int max_gap) {
if (max_gap >= 0 && max_gap <= 100)
this.max_gap = max_gap;
else
this.max_gap = NO_LIMIT;
}
public int getMaxAlignGap () {
return max_gap;
}
public boolean limitAlignGap() {
return max_gap != NO_LIMIT;
}
public void setCoincidence (String coincidenceStr) {
try {
setCoincidence (Integer.parseInt (coincidenceStr));
}
catch (Exception e) {
System.out.println ("Unable to parse coincidence from " + coincidenceStr +
" " + e.getMessage());
e.printStackTrace();
}
}
public void setCoincidence (int coincidence) {
if (coincidence >= 0 && coincidence <= 100)
this.coincidence = coincidence;
else
this.coincidence = NO_LIMIT;
}
public int getCoincidence () {
return coincidence;
}
public boolean useCoincidence() {
return coincidence != NO_LIMIT;
}
    /** @return whether shadow hits should be filtered out. */
    public boolean removeShadows() {
        return remove_shadows;
    }
    /** Enables or disables shadow filtering. */
    public void filterShadows(boolean remove) {
        remove_shadows = remove;
    }
    /* The twilight bounds are not yet configurable: at some point make it
       possible for these to be set. In the meantime a value that is less
       than 0 forces the program to use the defaults in the twilight filter.
    */
    /** @return the upper twilight bound; always -1 (use filter default). */
    public int getTwilightUpper () {
        return -1;
    }
    /** @return the lower twilight bound; always -1 (use filter default). */
    public int getTwilightLower() {
        return -1;
    }
    /** @return whether twilight hits should be removed. */
    public boolean removeTwilights () {
        return remove_twilight;
    }
    /** Enables or disables twilight removal. */
    public void setRemoveTwilights(boolean dothis) {
        this.remove_twilight = dothis;
    }
public void setMinScore (String scoreStr) {
try {
setMinScore (Integer.parseInt (scoreStr));
}
catch (Exception e) {
System.out.println ("Unable to parse score from " + scoreStr +
" " + e.getMessage());
e.printStackTrace();
}
}
public void setMinScore (int score) {
if (score > 0)
this.min_score = score;
else
this.min_score = NO_LIMIT;
}
public int getMinScore () {
return min_score;
}
public boolean useScore() {
return min_score != NO_LIMIT;
}
public void setMinIdentity (String identityStr) {
try {
setMinIdentity (Integer.parseInt (identityStr));
}
catch (Exception e) {
System.out.println ("Unable to parse identity from " + identityStr +
" " + e.getMessage());
e.printStackTrace();
}
}
public void setMinIdentity (int identity) {
if (identity > 0)
this.min_identity = identity;
else
this.min_identity = NO_LIMIT;
}
public int getMinIdentity () {
return min_identity;
}
public boolean useIdentity() {
return min_identity != NO_LIMIT;
}
public void setMinLength (String lengthStr) {
try {
setMinLength (Integer.parseInt (lengthStr));
}
catch (Exception e) {
System.out.println ("Unable to parse length from " + lengthStr +
" " + e.getMessage());
e.printStackTrace();
}
}
public void setMinLength (int length) {
if (length > 0)
this.min_length = length;
else
this.min_length = NO_LIMIT;
}
public int getMinLength () {
return min_length;
}
public boolean useLength() {
return min_length != NO_LIMIT;
}
public void setLengthUnits (boolean is_percent) {
this.use_percentage = is_percent;
}
public boolean usePercentage() {
return use_percentage;
}
    /** Enables or disables treating HSPs as autonomous (distinct) hits. */
    public void autonomousHSPs(boolean dothis) {
        this.distinctHSPs = dothis;
    }
    /** @return whether HSPs are treated as autonomous hits. */
    public boolean useAutonomousHSPs() {
        return distinctHSPs;
    }
    /** Enables or disables splitting of tandem duplications. */
    public void setSplitTandems(boolean dothis) {
        this.split_dups = dothis;
    }
    /** @return whether tandem duplications are split. */
    public boolean splitTandems() {
        return split_dups;
    }
    /**
     * Enables reverse-complementing of 3' reads; {@code suffix} is the
     * sequence-name suffix that marks a 3' read (null/empty disables).
     */
    public void revComp3Prime(String suffix) {
        this.revcomp_3prime = suffix;
    }
    /** @return true when a 3' suffix is configured (rev-comp enabled). */
    public boolean revComp3Prime() {
        return (revcomp_3prime != null && !revcomp_3prime.equals(""));
    }
    /** @return the configured 3' suffix (may be null or empty). */
    public String get3PrimeSuffix() {
        return revcomp_3prime;
    }
    /** Enables or disables joining of EST end pairs. */
    public void joinESTends(boolean dothis) {
        this.join_EST_ends = dothis;
    }
    /** @return whether EST end pairs are joined. */
    public boolean joinESTends() {
        return join_EST_ends;
    }
    /** Enables or disables poly-A trimming. */
    public void trimPolyA(boolean dothis) {
        this.trim_polyA = dothis;
    }
    /** @return whether poly-A tails are trimmed. */
    public boolean trimPolyA() {
        return trim_polyA;
    }
    /** Enables or disables keeping poly-A predictions. */
    public void keepPolyA(boolean dothis) {
        this.keep_polyApredict = dothis;
    }
    /** @return whether poly-A predictions are kept. */
    public boolean keepPolyA() {
        return keep_polyApredict;
    }
    /** Enables or disables keeping promoter predictions. */
    public void keepPromoter(boolean dothis) {
        this.keep_promoter = dothis;
    }
    /** @return whether promoter predictions are kept. */
    public boolean keepPromoter() {
        return keep_promoter;
    }
public void setDebug(String debug_str) {
this.debug_str = debug_str;
setDebug (debug_str != null &&
(debug_str.equalsIgnoreCase("t") ||
debug_str.equalsIgnoreCase("true")));
}
public void setDebug(boolean dothis) {
this.debug = dothis;
}
public boolean debugFilter(String this_str) {
return (debug || (debug_str != null && this_str.startsWith(debug_str)));
}
public void setSplitFrames(boolean dothis) {
this.split_frames = dothis;
}
public boolean splitFrames() {
return split_frames;
}
public void setQueryIsGenomic(boolean dothis) {
// System.out.println("AnalysisInput.setQueryIsGenomic " + dothis); // DEL
this.queryIsGenomic = dothis;
}
public boolean queryIsGenomic() {
return queryIsGenomic;
}
public void setOffset (String offsetStr, int genomic_start) {
offsetStr = offsetStr.trim();
if (offsetStr != null && !offsetStr.equals("")) {
try {
setOffset (Integer.parseInt (offsetStr) + genomic_start - 1);
}
catch (Exception e) {
System.out.println ("Unable to parse offset from " + offsetStr +
" " + e.getMessage());
e.printStackTrace();
setOffset (genomic_start); // - 1);
}
}
else {
setOffset (genomic_start); // - 1);
}
}
public void setOffset (int offset) {
if (offset > 0)
this.offset = offset;
else
this.offset = 0;
System.out.println ("Set offset to " + this.offset);
}
public int getOffset () {
return offset;
}
public Date getRunDate () {
return run_date;
}
public void setRunDate (long processed_time) {
if (processed_time > 0)
run_date = new java.util.Date(processed_time * 1000);
else
run_date = new java.util.Date(); //default to now
}
public void seRtunDate (Date run_date) {
this.run_date = run_date;
}
    /** Enables or disables collapsing of results. */
    public void collapseResults (boolean collapse) {
        this.collapse = collapse;
    }
    /** @return whether results are collapsed. */
    public boolean collapseResults () {
        return collapse;
    }
    /**
     * Enables auto-promotion of results; {@code baptizer} is the name used
     * to baptize promoted results (null disables).
     */
    public void promoteResults (String baptizer) {
        this.autopromote = baptizer;
    }
    /** @return true when auto-promotion is enabled. */
    public boolean promoteResults () {
        return autopromote != null;
    }
    /** @return the baptizer name, or null when auto-promotion is disabled. */
    public String getBaptizer () {
        return autopromote;
    }
    /** Records the database the analysis was run against. */
    public void setDatabase (String db) {
        this.database = db;
    }
    /** @return the database recorded by {@link #setDatabase}. */
    public String getDatabase() {
        return this.database;
    }
}
| |
/**
* Copyright (C) 2015 Hiroshi Sakurai
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.anolivetree.goncurrent;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.concurrent.locks.ReentrantLock;
/**
 * A Go-style channel: a bounded ring buffer (depth 0 = pure rendezvous)
 * through which blocking senders hand values to blocking receivers.
 * Per-thread wait state (condition, pending data) lives in
 * {@code ThreadContext}; all channels share one global lock.
 */
public class Chan<T> implements Iterable<T> {
    // One global lock shared by EVERY channel instance.
    // NOTE(review): this serializes all channel operations process-wide --
    // presumably required so select() can lock multiple channels at once;
    // confirm that is the intent.
    static final ReentrantLock sLock = new ReentrantLock();
    // Sentinel object, presumably returned by non-blocking select paths --
    // TODO confirm against the select() implementation.
    static final Object sWouldBlock = new Object();
    /** Factory method: creates a channel with the given buffer depth. */
    static public <T> Chan<T> create(int depth) {
        return new Chan<T>(depth);
    }
    /**
     * Outcome of a receive: {@code ok} is false when the channel was closed
     * (data is then the close() "end" value) or the wait was interrupted.
     */
    static public class Result<T> {
        public final T data;
        public final boolean ok;
        public Result(T data, boolean ok) {
            this.data = data;
            this.ok = ok;
        }
    }
    private final int mDepth;  // ring-buffer capacity
    private final ArrayList<ThreadContext> mReceivers = new ArrayList<ThreadContext>();  // blocked receivers, FIFO
    private final T[] mData;  // ring buffer of queued values
    private final ArrayList<ThreadContext> mSenders = new ArrayList<ThreadContext>();  // blocked senders, FIFO
    private boolean mIsClosed = false;
    private T mEnd = null;  // value handed to receivers after close(T end)
    private int mDataR;  // ring read index
    private int mDataW;  // ring write index
    private int mDataNum;  // number of buffered values
    private Chan(int depth) {
        if (depth < 0) {
            throw new IllegalArgumentException("depth < 0");
        }
        mDepth = depth;
        // Unchecked but safe: the array never escapes this class.
        mData = (T[])new Object[depth];
        mDataNum = 0;
        mDataR = 0;
        mDataW = 0;
    }
    /**
     * Sends {@code data}, blocking until a receiver or buffer space is
     * available. Throws RuntimeException if the channel is closed.
     */
    public void send(T data) {
        sLock.lock();
        try {
            if (Config.DEBUG_CHECK_STATE) {
                ThreadContext.get().ensureHasNoChan();
            }
            send(data, false);
        } finally {
            sLock.unlock();
        }
    }
    /**
     * Core send; caller must hold sLock.
     *
     * @return false if it would block. Even when it's true, it might mean that the thread is interrupted.
     */
    boolean send(T data, boolean nonblock) {
        while (true) {
            // Go's implementation forbids sending even though a goroutine has been waiting to send before close() is called.
            if (mIsClosed) {
                throw new RuntimeException("send on closed channel");
            }
            // Drain buffered values into waiting receivers to make space
            // in the queue, preserving FIFO order.
            while (mReceivers.size() > 0 && mDataNum > 0) {
                // copy data from queue
                passDataToFirstReceiverAndWakeup(mData[mDataR]);
                mData[mDataR] = null;  // drop the reference so it can be GC'd
                mDataNum--;
                mDataR++;
                if (mDataR >= mDepth) {
                    mDataR = 0;  // wrap the ring index
                }
            }
            // no more receivers || no more data || no receiver && no data
            if (mReceivers.size() > 0) {
                // Buffer is now empty, so FIFO order allows handing this
                // value directly to the next waiting receiver.
                passDataToFirstReceiverAndWakeup(data);
                return true;
            }
            if (mDataNum < mDepth) {
                // No receiver waiting but the ring has room: enqueue.
                mData[mDataW] = data;
                mDataNum++;
                mDataW++;
                if (mDataW >= mDepth) {
                    mDataW = 0;  // wrap the ring index
                }
                return true;
            }
            if (nonblock) {
                return false;
            }
            // Would block: register this thread as a waiting sender and
            // park on its per-thread condition (bound to sLock).
            ThreadContext context = null;
            try {
                context = ThreadContext.get();
                if (Config.DEBUG_CHECK_STATE) {
                    context.ensureHasNoChan();
                }
                context.addSendChan(this, data);
                mSenders.add(context);
                try {
                    context.mCond.await();
                } catch (InterruptedException e) {
                    // NOTE(review): the interrupt is swallowed and reported
                    // as success (true) without re-setting the thread's
                    // interrupt flag -- confirm this is intended.
                    mSenders.remove(context);
                    return true;
                }
                // A receiver that took our value removed us from mSenders
                // before signalling, so this remove should find nothing.
                boolean exist = mSenders.remove(context);
                if (exist) {
                    // NOTE(review): close() signals waiting senders but does
                    // NOT remove them from mSenders, so a close()-wakeup
                    // appears to land here and throw this internal error
                    // instead of reaching the "send on closed channel" path
                    // below -- confirm whether the check is inverted or
                    // close() should remove waiting senders first.
                    throw new RuntimeException("not removed from mSenders list");
                }
                if (context.mUnblockedChanIndex != -1) {
                    // woken up by receiver
                    return true;
                }
                // woken up by close.
            } finally {
                context.clearChan();
            }
        }
    }
    /**
     * Receives a value, blocking until one is available. Returns the
     * close() "end" value (possibly null) once the channel is closed.
     */
    public T receive() {
        sLock.lock();
        try {
            if (Config.DEBUG_CHECK_STATE) {
                ThreadContext.get().ensureHasNoChan();
            }
            Result<T> result = receive(false);
            return result.data;
        } finally {
            sLock.unlock();
        }
    }
    /** Like {@link #receive()} but also reports whether the channel is still open. */
    public Result<T> receiveWithResult() {
        sLock.lock();
        try {
            if (Config.DEBUG_CHECK_STATE) {
                ThreadContext.get().ensureHasNoChan();
            }
            return receive(false);
        } finally {
            sLock.unlock();
        }
    }
    /**
     * Core receive; caller must hold sLock.
     * NOTE(review): with nonblock=true this returns null (not a Result)
     * when it would block -- callers must handle that asymmetry.
     *
     * @return If closed, returns 'end' of close(T end) or null. When interrupted, returns null.
     */
    Result<T> receive(boolean nonblock) {
        while (true) {
            boolean hasRet = false;
            T data = null;
            // receive from queue
            if (mDataNum > 0) {
                data = mData[mDataR];
                hasRet = true;
                mData[mDataR] = null;  // drop the reference so it can be GC'd
                mDataNum--;
                mDataR++;
                if (mDataR >= mDepth) {
                    mDataR = 0;  // wrap the ring index
                }
            }
            // receive directly from the first sender
            if (!hasRet && mSenders.size() > 0) {
                if (Config.DEBUG_PRINT) {
                    System.out.printf("receive: receive directly from sender. numSenders=%d\n", mSenders.size());
                }
                data = removeAndWakeupFirstSender();
                hasRet = true;
            }
            // Refill the ring from any remaining blocked senders so they
            // can resume (copy data from senders to the queue).
            while (mSenders.size() > 0 && mDataNum < mDepth) {
                if (Config.DEBUG_PRINT) {
                    System.out.printf("receive: queue is some room. copy data from sender to the queue. numSenders=%d\n", mSenders.size());
                }
                mData[mDataW] = removeAndWakeupFirstSender();
                mDataNum++;
                mDataW++;
                if (mDataW >= mDepth) {
                    mDataW = 0;  // wrap the ring index
                }
            }
            if (hasRet) {
                return new Result<T>(data, true);
            }
            if (mIsClosed) {
                // Closed and drained: report the close() "end" value.
                return new Result<T>(mEnd, false);
            }
            if (nonblock) {
                return null;
            }
            // Would block: register as a waiting receiver and park on the
            // per-thread condition (bound to sLock).
            ThreadContext context = null;
            try {
                context = ThreadContext.get();
                if (Config.DEBUG_CHECK_STATE) {
                    context.ensureHasNoChan();
                }
                context.addReceiveChan(this);
                if (Config.DEBUG_PRINT) {
                    System.out.println("add receiver " + context);
                }
                mReceivers.add(context);
                // Wait until there's a space in the queue or any sender appear.
                try {
                    context.mCond.await();
                } catch (InterruptedException e) {
                    // NOTE(review): interruption is reported as ok=false,
                    // indistinguishable from close(), and the interrupt flag
                    // is not restored -- confirm this is intended.
                    boolean exist = mReceivers.remove(context);
                    if (Config.DEBUG_PRINT) {
                        System.out.println("remove context from receiver list context=" + context + " " + exist);
                    }
                    return new Result<T>(null, false);
                }
                if (Config.DEBUG_PRINT) {
                    System.out.println("receive: woken up");
                }
                // A sender that handed us data removed this context already;
                // a close() wakeup leaves it in the list (exist == true).
                boolean exist = mReceivers.remove(context);
                if (Config.DEBUG_PRINT) {
                    System.out.println("receive: remove context from receiver list context=" + context + " " + exist);
                }
                if (context.mUnblockedChanIndex != -1) {
                    // Woken by a sender: the value was stashed in the context.
                    data = (T) context.mReceivedData;
                    return new Result<T>(data, true);
                }
                // woken up by close() -- loop back; mIsClosed is now true.
            } finally {
                context.clearChan();
            }
        }
    }
    /** Hands {@code data} to the longest-waiting receiver and signals it. Caller holds sLock. */
    private void passDataToFirstReceiverAndWakeup(T data) {
        ThreadContext context = mReceivers.remove(0);
        if (Config.DEBUG_PRINT) {
            System.out.println("remove receiver(head) remain=" + mReceivers.size() + " " + context);
        }
        context.markReceiverUnblockedAndRemoveFromOtherChans(this, data);
        context.mCond.signal();
    }
    /** Takes the longest-waiting sender's value, signals it, and returns the value. Caller holds sLock. */
    private T removeAndWakeupFirstSender() {
        ThreadContext context = mSenders.remove(0);
        T data = (T) context.markSenderUnblockedAnsRemoveFromOtherChans(this);
        context.mCond.signal();
        return data;
    }
    // The four list helpers below are used by the select() machinery in
    // ThreadContext; caller holds sLock.
    void addToSenderList(ThreadContext context) {
        mSenders.add(context);
        if (Config.DEBUG_PRINT) {
            System.out.println("add context to sender list (select) context=" + context + ", numSenders=" + mSenders.size());
        }
    }
    void removeFromSenderList(ThreadContext context) {
        mSenders.remove(context);
        if (Config.DEBUG_PRINT) {
            System.out.println("remove context from sender list (select) context=" + context);
        }
    }
    void addToReceiverList(ThreadContext context) {
        mReceivers.add(context);
        if (Config.DEBUG_PRINT) {
            System.out.println("add context to receiver list (select) context=" + context + ", numReceivers=" + mReceivers.size());
        }
    }
    void removeFromReceiverList(ThreadContext context) {
        boolean exist = mReceivers.remove(context);
        if (Config.DEBUG_PRINT) {
            System.out.println("remove context from receiver list(select) context=" + context + " " + exist);
        }
    }
    /** Closes the channel; receivers drain the buffer then see ok=false with a null end value. */
    public void close() {
        close(null);
    }
    /**
     * Closes the channel with a final "end" value handed to every receiver
     * after the buffer drains. Idempotent: later calls are ignored.
     */
    public void close(T end) {
        // NOTE(review): unlike send/receive, the unlock is not in a
        // finally block, so an exception from signal() would leave sLock
        // held -- consider try/finally.
        sLock.lock();
        if (!mIsClosed) {
            mIsClosed = true;
            mEnd = end;
            // wakeup receivers
            int size = mReceivers.size();
            for (int i = 0; i < size; i++) {
                mReceivers.get(i).mCond.signal();
            }
            // wakeup senders
            size = mSenders.size();
            for (int i = 0; i < size; i++) {
                mSenders.get(i).mCond.signal();
            }
        }
        sLock.unlock();
    }
    /** @return the number of values currently buffered. */
    public int length() {
        // NOTE(review): unlock not in finally (harmless here -- reading an
        // int cannot throw -- but inconsistent with send/receive).
        sLock.lock();
        int ret = mDataNum;
        sLock.unlock();
        return ret;
    }
    /** @return the buffer depth this channel was created with. */
    public int capacity() {
        return mDepth;
    }
    /**
     * Iterates over values as they arrive, like Go's range-over-channel;
     * iteration ends when the channel is closed and drained.
     */
    @Override
    public Iterator<T> iterator() {
        if (Config.DEBUG_PRINT) {
            System.out.printf("iterator()\n");
        }
        return new ChanIterator<>(this);
    }
    /**
     * Iterator that pulls from the channel one value at a time, buffering
     * a single look-ahead value so hasNext() can block for the next item.
     */
    static private class ChanIterator<T> implements Iterator<T> {
        final private Chan<T> mChan;
        private boolean mHasData = false;  // true when mData holds a look-ahead value
        private T mData = null;
        public ChanIterator(Chan<T> chan) {
            mChan = chan;
        }
        /** Blocks until a value is available or the channel closes. */
        @Override
        public boolean hasNext() {
            if (Config.DEBUG_PRINT) {
                System.out.printf("hasNext() called\n");
            }
            if (mHasData) {
                if (Config.DEBUG_PRINT) {
                    System.out.printf("hasNext() return true\n");
                }
                return true;
            }
            read();
            if (Config.DEBUG_PRINT) {
                System.out.printf("hasNext() return " + mHasData + "\n");
            }
            return mHasData;
        }
        /**
         * Returns the buffered look-ahead value or reads a fresh one.
         * NOTE(review): returns null instead of throwing
         * NoSuchElementException when the channel is exhausted, which
         * deviates from the Iterator contract.
         */
        @Override
        public T next() {
            if (!mHasData) {
                read();
            }
            if (mHasData) {
                T ret = mData;
                mHasData = false;
                mData = null;  // release the reference
                return ret;
            } else {
                return null;
            }
        }
        @Override
        public void remove() {
            throw new RuntimeException("remove() not supported");
        }
        /** Pulls one value into the look-ahead slot; clears it on close/interrupt. */
        private void read() {
            if (Config.DEBUG_PRINT) {
                System.out.printf("call receiveWithResult\n");
            }
            Result<T> result = mChan.receiveWithResult();
            if (result.ok) {
                mData = result.data;
                mHasData = true;
                if (Config.DEBUG_PRINT) {
                    System.out.printf("read ok. mData=" + mData + "\n");
                }
            } else {
                mData = null;
                mHasData = false;
                if (Config.DEBUG_PRINT) {
                    System.out.printf("read fail\n");
                }
            }
        }
    }
}
| |
/*
* Copyright 2014 Guidewire Software, Inc.
*/
package gw.internal.gosu.parser.java.classinfo;
import gw.lang.GosuShop;
import gw.lang.javadoc.IClassDocNode;
import gw.lang.parser.TypeVarToTypeMap;
import gw.lang.reflect.IAnnotationInfo;
import gw.lang.reflect.IScriptabilityModifier;
import gw.lang.reflect.IType;
import gw.lang.reflect.Modifier;
import gw.lang.reflect.gs.ISourceFileHandle;
import gw.lang.reflect.java.AbstractJavaClassInfo;
import gw.lang.reflect.java.IJavaClassConstructor;
import gw.lang.reflect.java.IJavaClassField;
import gw.lang.reflect.java.IJavaClassInfo;
import gw.lang.reflect.java.IJavaClassMethod;
import gw.lang.reflect.java.IJavaClassType;
import gw.lang.reflect.java.IJavaClassTypeVariable;
import gw.lang.reflect.java.IJavaMethodDescriptor;
import gw.lang.reflect.java.IJavaPropertyDescriptor;
import gw.lang.reflect.java.ITypeInfoResolver;
import gw.lang.reflect.java.JavaTypes;
import gw.lang.reflect.module.IModule;
import java.lang.annotation.Annotation;
/**
 * Placeholder class-info for a Java source file that could not be resolved
 * into a real class model. It presents itself as a public, member-less class
 * whose supertype is {@code Object}: member lookups return empty arrays or
 * null, and type queries degrade to {@code Object}. Only the name, namespace,
 * source-file handle, and module from construction are meaningful.
 */
public class JavaSourceUnresolvedClass extends AbstractJavaClassInfo implements IJavaClassType, ITypeInfoResolver {
  // All state is assigned exactly once in the constructor; final makes the
  // immutability explicit (the original fields were mutable but never
  // reassigned).
  private final String _simpleName;
  private final String _namespace;
  private final IModule _gosuModule;
  private final ISourceFileHandle _fileHandle;
  public JavaSourceUnresolvedClass( ISourceFileHandle fileHandle, IModule gosuModule ) {
    _fileHandle = fileHandle;
    _simpleName = fileHandle.getRelativeName();
    _namespace = fileHandle.getNamespace();
    _gosuModule = gosuModule;
  }
  /** @return the fully qualified name: namespace + "." + simple name. */
  @Override
  public String getName() {
    return _namespace + "." + _simpleName;
  }
  @Override
  public String getSimpleName() {
    return _simpleName;
  }
  @Override
  public String getNameSignature() {
    return GosuShop.toSignature(getName());
  }
  // Relative name and display name both collapse to the simple name.
  @Override
  public String getRelativeName() {
    return _simpleName;
  }
  @Override
  public String getDisplayName() {
    return _simpleName;
  }
  // Kind queries: an unresolved class reports itself as a plain public,
  // non-array, non-enum, non-annotation, non-interface class.
  @Override
  public boolean isArray() {
    return false;
  }
  @Override
  public boolean isEnum() {
    return false;
  }
  @Override
  public boolean isPrimitive() {
    return false;
  }
  @Override
  public boolean isAnnotation() {
    return false;
  }
  @Override
  public boolean isInterface() {
    return false;
  }
  @Override
  public boolean isPublic() {
    return true;
  }
  @Override
  public boolean isProtected() {
    return false;
  }
  @Override
  public boolean isInternal() {
    return false;
  }
  @Override
  public boolean isPrivate() {
    return false;
  }
  /** Cannot instantiate an unresolved class; always null. */
  @Override
  public Object newInstance() throws InstantiationException, IllegalAccessException {
    return null;
  }
  // Member lookups: no members are known, so lookups return null or empty.
  @Override
  public IJavaClassMethod getMethod(String methodName, IJavaClassInfo... paramTypes) throws NoSuchMethodException {
    return null;
  }
  @Override
  public IJavaClassMethod getDeclaredMethod(String methodName, IJavaClassInfo... params) throws NoSuchMethodException {
    return null;
  }
  @Override
  public IJavaClassMethod[] getDeclaredMethods() {
    return new IJavaClassMethod[0];
  }
  @Override
  public IJavaMethodDescriptor[] getMethodDescriptors() {
    return new IJavaMethodDescriptor[0];
  }
  @Override
  public IJavaClassInfo[] getInterfaces() {
    return new IJavaClassInfo[0];
  }
  @Override
  public IJavaClassType[] getGenericInterfaces() {
    return new IJavaClassType[0];
  }
  // Supertype queries degrade to java.lang.Object.
  @Override
  public IJavaClassInfo getSuperclass() {
    return JavaTypes.OBJECT().getBackingClassInfo();
  }
  @Override
  public IJavaClassType getGenericSuperclass() {
    return JavaTypes.OBJECT().getBackingClassInfo();
  }
  @Override
  public IJavaClassField[] getDeclaredFields() {
    return new IJavaClassField[0];
  }
  @Override
  public IJavaClassField[] getFields() {
    return new IJavaClassField[0];
  }
  @Override
  public Object[] getEnumConstants() {
    return new Object[0];
  }
  @Override
  public IJavaPropertyDescriptor[] getPropertyDescriptors() {
    return new IJavaPropertyDescriptor[0];
  }
  @Override
  public IJavaClassConstructor[] getDeclaredConstructors() {
    return new IJavaClassConstructor[0];
  }
  @Override
  public IJavaClassConstructor getConstructor( IJavaClassInfo... params ) throws NoSuchMethodException {
    return null;
  }
  @Override
  public IType getJavaType() {
    return JavaTypes.OBJECT();
  }
  @Override
  public IJavaClassTypeVariable[] getTypeParameters() {
    return new IJavaClassTypeVariable[0];
  }
  @Override
  public IClassDocNode createClassDocNode() {
    return null;
  }
  @Override
  public boolean hasCustomBeanInfo() {
    return false;
  }
  @Override
  public boolean isVisibleViaFeatureDescriptor(IScriptabilityModifier constraint) {
    return false;
  }
  @Override
  public boolean isHiddenViaFeatureDescriptor() {
    return false;
  }
  @Override
  public IJavaClassInfo getComponentType() {
    return null;
  }
  @Override
  public int getModifiers() {
    return Modifier.PUBLIC;
  }
  @Override
  public IType getEnclosingType() {
    return null;
  }
  @Override
  public IJavaClassInfo getArrayType() {
    return null;
  }
  @Override
  public IJavaClassInfo[] getDeclaredClasses() {
    return new IJavaClassInfo[0];
  }
  /** No backing bytecode class exists for an unresolved source file. */
  @Override
  public Class getBackingClass() {
    return null;
  }
  @Override
  public ISourceFileHandle getSourceFileHandle() {
    return _fileHandle;
  }
  // Annotation queries: nothing is known, so nothing is present.
  @Override
  public boolean isAnnotationPresent(Class<? extends Annotation> annotationClass) {
    return false;
  }
  @Override
  public IAnnotationInfo getAnnotation(Class<? extends Annotation> annotationClass) {
    return null;
  }
  @Override
  public IAnnotationInfo[] getDeclaredAnnotations() {
    return new IAnnotationInfo[0];
  }
  @Override
  public IJavaClassInfo getEnclosingClass() {
    return null;
  }
  // Generic-type resolution is a no-op: the actual type is always Object
  // and this placeholder is its own concrete type.
  @Override
  public IType getActualType(TypeVarToTypeMap typeMap) {
    return getJavaType();
  }
  @Override
  public IType getActualType(TypeVarToTypeMap typeMap, boolean bKeepTypeVars) {
    return getJavaType();
  }
  @Override
  public IJavaClassType getConcreteType() {
    return this;
  }
  // Relative type/import resolution always fails (null) for an unresolved class.
  @Override
  public IJavaClassType resolveType(String relativeName, int ignoreFlags) {
    return null;
  }
  @Override
  public IJavaClassType resolveType(String relativeName, IJavaClassInfo whosAskin, int ignoreFlags) {
    return null;
  }
  @Override
  public IJavaClassType resolveImport(String relativeName) {
    return null;
  }
  @Override
  public IModule getModule() {
    return _gosuModule;
  }
  @Override
  public String getNamespace() {
    return _namespace;
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.