answer
stringlengths
17
10.2M
package org.basex.test.rest; import static org.basex.core.Text.*; import static org.basex.util.Token.*; import static org.junit.Assert.*; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.net.HttpURLConnection; import java.net.URL; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.basex.api.BaseXHTTP; import org.basex.build.Parser; import org.basex.core.BaseXException; import org.basex.core.Command; import org.basex.core.Context; import org.basex.core.Prop; import org.basex.core.Text; import org.basex.core.cmd.XQuery; import org.basex.io.IO; import org.basex.io.IOContent; import org.basex.query.QueryException; import org.basex.query.func.FNSimple; import org.basex.query.item.ANode; import org.basex.query.item.AtomType; import org.basex.query.item.B64; import org.basex.query.item.Bln; import org.basex.query.item.DBNode; import org.basex.query.item.FElem; import org.basex.query.item.FTxt; import org.basex.query.item.Hex; import org.basex.query.item.NodeType; import org.basex.query.item.QNm; import org.basex.query.item.Str; import org.basex.query.iter.ItemCache; import org.basex.query.iter.Iter; import org.basex.query.iter.NodeIter; import org.basex.query.iter.ValueIter; import org.basex.query.util.Err; import org.basex.query.util.http.HTTPClient; import org.basex.query.util.http.Request; import org.basex.query.util.http.Request.Part; import org.basex.query.util.http.RequestParser; import org.basex.query.util.http.ResponseHandler; import org.basex.util.Util; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; public final class HTTPClientTest { /** Status code. 
*/ private static final byte[] STATUS = token("status"); /** Body attribute media-type. */ private static final byte[] MEDIATYPE = token("media-type"); /** Body attribute method. */ private static final byte[] METHOD = token("method"); /** Example url. */ private static final String URL = "'http://localhost:8984/rest/books'"; /** Carriage return/Line feed. */ private static final String CRLF = "\r\n"; /** Database context. */ static Context context; /** HTTP servers. */ private static BaseXHTTP http; /** * Prepare test. * @throws Exception exception */ @BeforeClass public static void start() throws Exception { context = new Context(); context.prop.set(Prop.CACHEQUERY, true); http = new BaseXHTTP("-czWU" + ADMIN + " -P" + ADMIN); } /** * Finish test. * @throws Exception exception */ @AfterClass public static void stop() throws Exception { context.close(); http.stop(); } /** * Creates a test database. * @throws BaseXException database exception */ @Before public void init() throws BaseXException { final Command put = new XQuery("http:send-request(" + "<http:request method='put' status-only='true'>" + "<http:body media-type='text/xml'>" + "<books>" + "<book id='1'>" + "<name>Sherlock Holmes</name>" + "<author>Doyle</author>" + "</book>" + "<book id='2'>" + "<name>Winnetou</name>" + "<author>May</author>" + "</book>" + "<book id='3'>" + "<name>Tom Sawyer</name>" + "<author>Twain</author>" + "</book>" + "</books>" + "</http:body>" + "</http:request>, " + URL + ")"); put.execute(context); } /** * Deletes the test database. * @throws BaseXException database exception */ @After public void finish() throws BaseXException { final Command delete = new XQuery("http:send-request(" + "<http:request method='delete' status-only='true'/>, " + URL + ")"); delete.execute(context); } /** * Test sending of HTTP PUT requests. 
* @throws Exception exception */ @Test public void testPUT() throws Exception { final Command put = new XQuery("http:send-request(" + "<http:request method='put' status-only='true'>" + "<http:body media-type='text/xml'>" + "<books>" + "<book id='1'>" + "<name>Sherlock Holmes</name>" + "<author>Doyle</author>" + "</book>" + "<book id='2'>" + "<name>Winnetou</name>" + "<author>May</author>" + "</book>" + "<book id='3'>" + "<name>Tom Sawyer</name>" + "<author>Twain</author>" + "</book>" + "</books>" + "</http:body>" + "</http:request>, " + URL + ")"); put.execute(context); checkResponse(put, HttpURLConnection.HTTP_CREATED, 1); } /** * Test sending of HTTP POST Query requests. * @throws Exception exception */ @Test public void testPOSTQuery() throws Exception { // POST - query final Command postQuery = new XQuery("http:send-request(" + "<http:request method='post'>" + "<http:body media-type='application/query+xml'>" + "<query xmlns='" + Text.URL + "/rest'>" + "<text>1</text>" + "<parameter name='wrap' value='yes'/>" + "</query>" + "</http:body>" + "</http:request>, " + URL + ")"); postQuery.execute(context); checkResponse(postQuery, HttpURLConnection.HTTP_OK, 2); // Execute the same query but with content set from $bodies final Command postQuery2 = new XQuery("http:send-request(" + "<http:request method='post'>" + "<http:body media-type='application/query+xml'/></http:request>" + "," + URL + "," + "<query xmlns='" + Text.URL + "/rest'>" + "<text>1</text>" + "<parameter name='wrap' value='yes'/>" + "</query>)"); postQuery2.execute(context); checkResponse(postQuery2, HttpURLConnection.HTTP_OK, 2); } /** * Test sending of HTTP POST Add requests. 
* @throws Exception exception */ @Test public void testPOSTAdd() throws Exception { // POST - add content final Command postAdd = new XQuery("http:send-request(" + "<http:request method='post' status-only='true'>" + "<http:body media-type='text/xml'>" + "<book id='4'>" + "<name>The Celebrated Jumping Frog of Calaveras County</name>" + "<author>Twain</author>" + "</book>" + "</http:body>" + "</http:request>, " + URL + ")"); postAdd.execute(context); checkResponse(postAdd, HttpURLConnection.HTTP_CREATED, 1); } /** * Test sending of HTTP GET requests. * @throws Exception exception */ @Test public void testPOSTGet() throws Exception { // GET1 - just send a GET request final Command get1 = new XQuery("http:send-request(" + "<http:request method='get' href=" + URL + "/>)"); get1.execute(context); checkResponse(get1, HttpURLConnection.HTTP_OK, 2); assertTrue(((ItemCache) get1.result()).item[1].type == NodeType.DOC); // GET2 - with override-media-type='text/plain' final Command get2 = new XQuery("http:send-request(" + "<http:request method='get' override-media-type='text/plain'/>," + URL + ")"); get2.execute(context); checkResponse(get2, HttpURLConnection.HTTP_OK, 2); assertTrue(((ItemCache) get2.result()).item[1].type == AtomType.STR); // Get3 - with status-only='true' final Command get3 = new XQuery("http:send-request(" + "<http:request method='get' status-only='true'/>," + URL + ")"); get3.execute(context); checkResponse(get3, HttpURLConnection.HTTP_OK, 1); } /** * Test sending of HTTP DELETE requests. * @throws Exception exception */ @Test public void testPOSTDelete() throws Exception { // DELETE final Command delete = new XQuery("http:send-request(" + "<http:request method='delete' status-only='true'/>, " + URL + ")"); delete.execute(context); checkResponse(delete, HttpURLConnection.HTTP_OK, 1); } /** * Test sending of HTTP request without any attributes - error shall be thrown * that mandatory attributes are missing. 
*/ @Test public void sendEmptyReq() { final Command c = new XQuery("http:send-request(<http:request/>)"); try { c.execute(context); } catch(final BaseXException ex) { assertTrue(indexOf(token(ex.getMessage()), token(Err.ErrType.FOHC.toString())) != -1); } } /** * Tests http:send-request((),()). */ @Test public void testSendReqNoParams() { final Command c = new XQuery("http:send-request(())"); try { c.execute(context); } catch(final BaseXException ex) { assertTrue(indexOf(token(ex.getMessage()), token(Err.ErrType.FOHC.toString())) != -1); } } /** * Tests RequestParser.parse() with normal(not multipart) request. * @throws IOException IO exception * @throws QueryException query exception */ @Test public void testParseRequest() throws IOException, QueryException { // Simple HTTP request with no errors final byte[] req = token("<http:request " + "xmlns:http=\"http://expath.org/ns/http\" " + "method='POST' href='http: + "<http:header name='hdr1' value='hdr1val'/>" + "<http:header name='hdr2' value='hdr2val'/>" + "<http:body media-type='text/xml'>" + "Test body content" + "</http:body>" + "</http:request>"); final IO io = new IOContent(req); final Parser reqParser = Parser.xmlParser(io, context.prop, ""); final DBNode dbNode = new DBNode(reqParser, context.prop); final Request r = RequestParser.parse(dbNode.children().next(), null, null); assertTrue(r.attrs.size() == 2); assertTrue(r.headers.size() == 2); assertTrue(r.bodyContent.size() != 0); assertTrue(r.payloadAttrs.size() == 1); } /** * Tests RequestParser.parse() with multipart request. 
* @throws IOException IO exception * @throws QueryException query exception */ @Test public void testParseMultipartReq() throws IOException, QueryException { final byte[] multiReq = token("<http:request " + "xmlns:http=\"http://expath.org/ns/http\" " + "method='POST' href='http: + "<http:header name='hdr1' value='hdr1val'/>" + "<http:header name='hdr2' value='hdr2val'/>" + "<http:multipart media-type='multipart/mixed' boundary='xxxx'>" + "<part>" + "<http:header name='p1hdr1' value='p1hdr1val'/>" + "<http:header name='p1hdr2' value='p1hdr2val'/>" + "<http:body media-type='text/plain'>" + "Part1" + "</http:body>" + "</part>" + "<part>" + "<http:header name='p2hdr1' value='p2hdr1val'/>" + "<http:body media-type='text/plain'>" + "Part2" + "</http:body>" + "</part>" + "<part>" + "<http:body media-type='text/plain'>" + "Part3" + "</http:body>" + "</part>" + "</http:multipart>" + "</http:request>"); final IO io = new IOContent(multiReq); final Parser p = Parser.xmlParser(io, context.prop, ""); final DBNode dbNode1 = new DBNode(p, context.prop); final Request r = RequestParser.parse(dbNode1.children().next(), null, null); assertTrue(r.attrs.size() == 2); assertTrue(r.headers.size() == 2); assertTrue(r.isMultipart); assertTrue(r.parts.size() == 3); // check parts final Iterator<Part> i = r.parts.iterator(); Part part = null; part = i.next(); assertTrue(part.headers.size() == 2); assertTrue(part.bodyContent.size() == 1); assertTrue(part.bodyAttrs.size() == 1); part = i.next(); assertTrue(part.headers.size() == 1); assertTrue(part.bodyContent.size() == 1); assertTrue(part.bodyAttrs.size() == 1); part = i.next(); assertTrue(part.headers.size() == 0); assertTrue(part.bodyContent.size() == 1); assertTrue(part.bodyAttrs.size() == 1); } /** * Tests parsing of multipart request when the contents for each part are set * from the $bodies parameter. 
* @throws IOException IO exception * @throws QueryException query exception */ @Test public void testParseMultipartReqBodies() throws IOException, QueryException { final byte[] multiReq = token("<http:request " + "xmlns:http=\"http://expath.org/ns/http\" " + "method='POST' href='http: + "<http:header name='hdr1' value='hdr1val'/>" + "<http:header name='hdr2' value='hdr2val'/>" + "<http:multipart media-type='multipart/mixed' boundary='xxxx'>" + "<part>" + "<http:header name='p1hdr1' value='p1hdr1val'/>" + "<http:header name='p1hdr2' value='p1hdr2val'/>" + "<http:body media-type='text/plain'/>" + "</part>" + "<part>" + "<http:header name='p2hdr1' value='p2hdr1val'/>" + "<http:body media-type='text/plain'/>" + "</part>" + "<part>" + "<http:body media-type='text/plain'/>" + "</part>" + "</http:multipart>" + "</http:request>"); final IO io = new IOContent(multiReq); final Parser p = Parser.xmlParser(io, context.prop, ""); final DBNode dbNode1 = new DBNode(p, context.prop); final ItemCache bodies = new ItemCache(); bodies.add(Str.get("Part1")); bodies.add(Str.get("Part2")); bodies.add(Str.get("Part3")); final Request r = RequestParser.parse(dbNode1.children().next(), bodies, null); assertTrue(r.attrs.size() == 2); assertTrue(r.headers.size() == 2); assertTrue(r.isMultipart); assertTrue(r.parts.size() == 3); // check parts final Iterator<Part> i = r.parts.iterator(); Part part = i.next(); assertTrue(part.headers.size() == 2); assertTrue(part.bodyContent.size() == 1); assertTrue(part.bodyAttrs.size() == 1); part = i.next(); assertTrue(part.headers.size() == 1); assertTrue(part.bodyContent.size() == 1); assertTrue(part.bodyAttrs.size() == 1); part = i.next(); assertTrue(part.headers.size() == 0); assertTrue(part.bodyContent.size() == 1); assertTrue(part.bodyAttrs.size() == 1); } /** * Tests if errors are thrown when some mandatory attributes are missing in a * <http:request/>, <http:body/> or <http:multipart/>. 
* @throws IOException IO exception */ @Test public void testErrors() throws IOException { // Incorrect requests final List<byte[]> falseReqs = new ArrayList<byte[]>(); // Request without method final byte[] falseReq1 = token("<http:request " + "xmlns:http=\"http://expath.org/ns/http\" " + "href='http: falseReqs.add(falseReq1); // Request with send-authorization and no credentials final byte[] falseReq2 = token("<http:request " + "xmlns:http=\"http://expath.org/ns/http\" " + "method='GET' href='http: + "send-authorization='true'/>"); falseReqs.add(falseReq2); // Request with send-authorization and only username final byte[] falseReq3 = token("<http:request " + "xmlns:http=\"http://expath.org/ns/http\" " + "method='GET' href='http: + "send-authorization='true' username='test'/>"); falseReqs.add(falseReq3); // Request with body that has no media-type final byte[] falseReq4 = token("<http:request " + "xmlns:http=\"http://expath.org/ns/http\" " + "method='POST' href='http: + "</http:body>" + "</http:request>"); falseReqs.add(falseReq4); // Request with multipart that has no media-type final byte[] falseReq5 = token("<http:request method='POST' " + "xmlns:http=\"http://expath.org/ns/http\" " + "href='http: + "</http:multipart>" + "</http:request>"); falseReqs.add(falseReq5); // Request with multipart with part that has a body without media-type final byte[] falseReq6 = token("<http:request method='POST' " + "xmlns:http=\"http://expath.org/ns/http\" " + "href='http: + "<part>" + "<http:header name='hdr1' value-='val1'/>" + "<http:body media-type='text/plain'>" + "Part1" + "</http:body>" + "</part>" + "<part>" + "<http:header name='hdr1' value-='val1'/>" + "<http:body>" + "Part1" + "</http:body>" + "</part>" + "</http:multipart>" + "</http:request>"); falseReqs.add(falseReq6); // Request with schema different from http final byte[] falseReq7 = token("<http:request " + "xmlns:http=\"http://expath.org/ns/http\" " + "href='ftp: falseReqs.add(falseReq7); // Request with 
content and method which must be empty final byte[] falseReq8 = token("<http:request " + "xmlns:http=\"http://expath.org/ns/http\" " + "method='DELETE' href='http: + "<http:body media-type='text/plain'>" + "</http:body>" + "</http:request>"); falseReqs.add(falseReq8); final Iterator<byte[]> i = falseReqs.iterator(); IO io = null; Parser p = null; DBNode dbNode; byte[] it; while(i.hasNext()) { it = i.next(); io = new IOContent(it); p = Parser.xmlParser(io, context.prop, ""); dbNode = new DBNode(p, context.prop); try { RequestParser.parse(dbNode.children().next(), null, null); fail("Exception not thrown"); } catch(final QueryException ex) { assertTrue(indexOf(token(ex.getMessage()), token(Err.ErrType.FOHC.toString())) != -1); } } } /** * Tests method setRequestContent of HttpClient. * @throws IOException IO exception * @throws QueryException query exception */ @Test public void testWriteMultipartMessage() throws IOException, QueryException { final Request req = new Request(); req.isMultipart = true; req.payloadAttrs.add(token("media-type"), token("multipart/alternative")); req.payloadAttrs.add(token("boundary"), token("boundary42")); final Part p1 = new Part(); p1.headers.add(token("Content-Type"), token("text/plain; " + "charset=us-ascii")); p1.bodyAttrs.add(token("media-type"), token("text/plain")); p1.bodyContent.add(Str.get(token("...plain text version of message " + "goes here....\n"))); final Part p2 = new Part(); p2.headers.add(token("Content-Type"), token("text/richtext")); p2.bodyAttrs.add(token("media-type"), token("text/richtext")); p2.bodyContent.add(Str.get(token(".... richtext version " + "of same message goes here ..."))); final Part p3 = new Part(); p3.headers.add(token("Content-Type"), token("text/x-whatever")); p3.bodyAttrs.add(token("media-type"), token("text/x-whatever")); p3.bodyContent.add(Str.get(token(".... 
fanciest formatted version " + "of same message goes here..."))); req.parts.add(p1); req.parts.add(p2); req.parts.add(p3); final FakeHttpConnection fakeConn = new FakeHttpConnection(new URL( "http: HTTPClient.setRequestContent(fakeConn.getOutputStream(), req, null); final String expResult = "--boundary42" + CRLF + "Content-Type: text/plain; charset=us-ascii" + CRLF + CRLF + "...plain text version of message goes here....\n" + CRLF + "--boundary42" + CRLF + "Content-Type: text/richtext" + CRLF + CRLF + ".... richtext version of same message goes here ..." + CRLF + "--boundary42" + CRLF + "Content-Type: text/x-whatever" + CRLF + CRLF + ".... fanciest formatted version of same message goes here..." + CRLF + "--boundary42--" + CRLF; // Compare results final String fake = fakeConn.getOutputStream().toString(); assertTrue(expResult.equals(fake)); } /** * Tests writing of request content with different combinations of the body * attributes media-type and method. * @throws IOException IO execption * @throws QueryException query exception */ @Test public void testWriteMessage() throws IOException, QueryException { // Case 1: No method, media-type='text/xml' final Request req1 = new Request(); final FakeHttpConnection fakeConn1 = new FakeHttpConnection(new URL( "http: req1.payloadAttrs.add(MEDIATYPE, token("text/xml")); // Node child final FElem e1 = new FElem(new QNm(token("a"))); e1.add(new FTxt(token("a"))); req1.bodyContent.add(e1); // String item child req1.bodyContent.add(Str.get("<b>b</b>")); HTTPClient.setRequestContent(fakeConn1.getOutputStream(), req1, null); assertTrue(eq(fakeConn1.out.toByteArray(), token("<a>a</a> &lt;b&gt;b&lt;/b&gt;"))); // Case 2: No method, media-type='text/plain' final Request req2 = new Request(); final FakeHttpConnection fakeConn2 = new FakeHttpConnection(new URL( "http: req2.payloadAttrs.add(MEDIATYPE, token("text/plain")); // Node child final FElem e2 = new FElem(new QNm(token("a"))); e2.add(new FTxt(token("a"))); 
req2.bodyContent.add(e2); // String item child req2.bodyContent.add(Str.get("<b>b</b>")); HTTPClient.setRequestContent(fakeConn2.getOutputStream(), req2, null); assertTrue(eq(fakeConn2.out.toByteArray(), token("a&lt;b&gt;b&lt;/b&gt;"))); // Case 3: method='text', media-type='text/xml' final Request req3 = new Request(); final FakeHttpConnection fakeConn3 = new FakeHttpConnection(new URL( "http: req3.payloadAttrs.add(MEDIATYPE, token("text/xml")); req3.payloadAttrs.add(token("method"), token("text")); // Node child final FElem e3 = new FElem(new QNm(token("a"))); e3.add(new FTxt(token("a"))); req3.bodyContent.add(e3); // String item child req3.bodyContent.add(Str.get("<b>b</b>")); HTTPClient.setRequestContent(fakeConn3.getOutputStream(), req3, null); assertTrue(eq(fakeConn3.out.toByteArray(), token("a&lt;b&gt;b&lt;/b&gt;"))); } /** * Tests writing of body content when @method is http:base64Binary. * @throws QueryException query exception * @throws IOException IO exception */ @Test public void testWriteBase64() throws IOException, QueryException { // Case 1: content is xs:base64Binary final Request req1 = new Request(); req1.payloadAttrs.add(METHOD, token("http:base64Binary")); req1.bodyContent.add(new B64(token("dGVzdA=="))); final FakeHttpConnection fakeConn1 = new FakeHttpConnection(new URL( "http: HTTPClient.setRequestContent(fakeConn1.getOutputStream(), req1, null); assertTrue(eq(token("dGVzdA=="), fakeConn1.out.toByteArray())); // Case 2: content is a node final Request req2 = new Request(); req2.payloadAttrs.add(METHOD, token("http:base64Binary")); final FElem e3 = new FElem(new QNm(token("a"))); e3.add(new FTxt(token("dGVzdA=="))); req2.bodyContent.add(e3); final FakeHttpConnection fakeConn2 = new FakeHttpConnection(new URL( "http: HTTPClient.setRequestContent(fakeConn2.getOutputStream(), req2, null); assertTrue(eq(token("dGVzdA=="), fakeConn2.out.toByteArray())); } /** * Tests writing of body content when @method is http:hexBinary. 
* @throws IOException IO exception * @throws QueryException query exception */ @Test public void testWriteHex() throws IOException, QueryException { // Case 1: content is xs:hexBinary final Request req1 = new Request(); req1.payloadAttrs.add(METHOD, token("http:hexBinary")); req1.bodyContent.add(new Hex(token("74657374"))); final FakeHttpConnection fakeConn1 = new FakeHttpConnection(new URL( "http: HTTPClient.setRequestContent(fakeConn1.getOutputStream(), req1, null); assertTrue(eq(token("74657374"), fakeConn1.out.toByteArray())); // Case 2: content is a node final Request req2 = new Request(); req2.payloadAttrs.add(METHOD, token("http:base64Binary")); final FElem e3 = new FElem(new QNm(token("a"))); e3.add(new FTxt(token("74657374"))); req2.bodyContent.add(e3); final FakeHttpConnection fakeConn2 = new FakeHttpConnection(new URL( "http: HTTPClient.setRequestContent(fakeConn2.getOutputStream(), req2, null); assertTrue(eq(token("74657374"), fakeConn2.out.toByteArray())); } /** * Tests writing of request content when @src is set. * @throws QueryException query exception * @throws IOException IO exception */ @Test public void testWriteFromResource() throws IOException, QueryException { // Create a file form which will be read final File f = new File(Prop.TMP + Util.name(HTTPClientTest.class)); final FileOutputStream out = new FileOutputStream(f); out.write(token("test")); out.close(); // Request final Request req = new Request(); req.payloadAttrs.add(token("src"), token("file:" + f.getPath())); // HTTP connection final FakeHttpConnection fakeConn = new FakeHttpConnection(new URL( "http: HTTPClient.setRequestContent(fakeConn.getOutputStream(), req, null); // Delete file f.delete(); assertTrue(eq(token("test"), fakeConn.out.toByteArray())); } /** * Tests response handling with specified charset in the header * 'Content-Type'. 
* @throws IOException IO exception * @throws QueryException query exception */ @Test public void getResponseWithCharset() throws IOException, QueryException { // Create fake HTTP connection final FakeHttpConnection conn = new FakeHttpConnection(new URL( "http: final String test = "\u0442\u0435\u0441\u0442"; // Set content type conn.contentType = "text/plain; charset=CP1251"; // set content encoded in CP1251 conn.content = Charset.forName("CP1251").encode(test).array(); final Iter i = ResponseHandler.getResponse(conn, Bln.FALSE.atom(null), null, context.prop, null); // compare results assertTrue(eq(i.get(1).atom(null), token(test))); } /** * Tests ResponseHandler.getResponse() with multipart response. * @throws IOException IO exception * @throws QueryException query exception */ @Test public void testGetMultipartResponse() throws IOException, QueryException { // Create fake HTTP connection final FakeHttpConnection conn = new FakeHttpConnection(new URL( "http: final Map<String, List<String>> hdrs = new HashMap<String, List<String>>(); final List<String> fromVal = new ArrayList<String>(); fromVal.add("Nathaniel Borenstein <nsb@bellcore.com>"); // From: Nathaniel Borenstein <nsb@bellcore.com> hdrs.put("From", fromVal); final List<String> mimeVal = new ArrayList<String>(); mimeVal.add("1.0"); hdrs.put("MIME-version", mimeVal); final List<String> subjVal = new ArrayList<String>(); subjVal.add("Formatted text mail"); // Subject: Formatted text mail hdrs.put("Subject", subjVal); final List<String> contTypeVal = new ArrayList<String>(); contTypeVal.add("multipart/alternative"); contTypeVal.add("boundary=\"boundary42\""); // Content-Type: multipart/alternative; boundary=boundary42 hdrs.put("Content-Type", contTypeVal); conn.headers = hdrs; conn.contentType = "multipart/alternative; boundary=\"boundary42\""; conn.content = token("--boundary42" + CRLF + "Content-Type: text/plain; charset=us-ascii" + CRLF + CRLF + "...plain text version of message goes here...." 
+ CRLF + CRLF + "--boundary42\r" + NL + "Content-Type: text/richtext" + CRLF + CRLF + ".... richtext version of same message goes here ..." + CRLF + "--boundary42\r" + NL + "Content-Type: text/x-whatever" + CRLF + CRLF + ".... fanciest formatted version of same " + "message goes here\n..." + CRLF + "--boundary42 final Iter i = ResponseHandler.getResponse(conn, Bln.FALSE.atom(null), null, context.prop, null); // Construct expected result final ItemCache resultIter = new ItemCache(); final byte[] reqItem = token("<http:response " + "xmlns:http=\"http://expath.org/ns/http\" " + "status=\"200\" message=\"OK\">" + "<http:header name=\"Subject\" value=\"Formatted text mail\"/>" + "<http:header name=\"Content-Type\" " + "value=\"multipart/alternative;boundary=&quot;boundary42&quot;\"/>" + "<http:header name=\"MIME-version\" value=\"1.0\"/>" + "<http:header name=\"From\" value=\"Nathaniel Borenstein " + "&lt;nsb@bellcore.com&gt;\"/>" + "<http:multipart media-type=\"multipart/alternative\" " + "boundary=\"boundary42\">" + "<part>" + "<http:header name=\"Content-Type\" " + "value=\"text/plain; charset=us-ascii\"/>" + "<http:body media-type=\"text/plain; charset=us-ascii\"/>" + "</part>" + "<part>" + "<http:header name=\"Content-Type\" value=\"text/richtext\"/>" + "<http:body media-type=\"text/richtext\"/>" + "</part>" + "<part>" + "<http:header name=\"Content-Type\" value=\"text/x-whatever\"/>" + "<http:body media-type=\"text/x-whatever\"/>" + "</part>" + "</http:multipart>" + "</http:response> "); final IO io = new IOContent(reqItem); final Parser reqParser = Parser.xmlParser(io, context.prop, ""); final DBNode dbNode = new DBNode(reqParser, context.prop); resultIter.add(dbNode.children().next()); resultIter.add(Str.get(token("...plain text version of message " + "goes here....\n\n"))); resultIter.add(Str.get(token(".... richtext version of same message " + "goes here ...\n"))); resultIter.add(Str.get(token(".... 
fanciest formatted version of same " + "message goes here\n...\n"))); // Compare response with expected result assertTrue(FNSimple.deep(null, resultIter, i)); } /** * Tests ResponseHandler.getResponse() with multipart response having preamble * and epilogue. * @throws IOException IO Exception * @throws QueryException query exception */ @Test public void testGetMutipartRespPreamble() throws IOException, QueryException { // Create fake HTTP connection final FakeHttpConnection conn = new FakeHttpConnection(new URL( "http: final Map<String, List<String>> hdrs = new HashMap<String, List<String>>(); final List<String> fromVal = new ArrayList<String>(); fromVal.add("Nathaniel Borenstein <nsb@bellcore.com>"); // From: Nathaniel Borenstein <nsb@bellcore.com> hdrs.put("From", fromVal); final List<String> mimeVal = new ArrayList<String>(); mimeVal.add("1.0"); final List<String> toVal = new ArrayList<String>(); toVal.add("Ned Freed <ned@innosoft.com>"); // To: Ned Freed <ned@innosoft.com> hdrs.put("To", toVal); hdrs.put("MIME-version", mimeVal); final List<String> subjVal = new ArrayList<String>(); subjVal.add("Formatted text mail"); // Subject: Formatted text mail hdrs.put("Subject", subjVal); final List<String> contTypeVal = new ArrayList<String>(); contTypeVal.add("multipart/mixed"); contTypeVal.add("boundary=\"simple boundary\""); // Content-Type: multipart/alternative; boundary=boundary42 hdrs.put("Content-Type", contTypeVal); conn.headers = hdrs; conn.contentType = "multipart/mixed; boundary=\"simple boundary\""; // Response to be read conn.content = token("This is the preamble. " + "It is to be ignored, though it" + NL + "is a handy place for mail composers to include an" + CRLF + "explanatory note to non-MIME compliant readers." + CRLF + "--simple boundary" + CRLF + CRLF + "This is implicitly typed plain ASCII text." + CRLF + "It does NOT end with a linebreak." 
+ CRLF + "--simple boundary" + CRLF + "Content-type: text/plain; charset=us-ascii" + CRLF + CRLF + "This is explicitly typed plain ASCII text." + CRLF + "It DOES end with a linebreak." + CRLF + CRLF + "--simple boundary--" + CRLF + "This is the epilogue. It is also to be ignored."); // Get response as sequence of XQuery items final Iter i = ResponseHandler.getResponse(conn, Bln.FALSE.atom(null), null, context.prop, null); // Construct expected result final ItemCache resultIter = new ItemCache(); final byte[] reqItem = token("<http:response " + "xmlns:http=\"http://expath.org/ns/http\" " + "status=\"200\" message=\"OK\">" + "<http:header name=\"Subject\" value=\"Formatted text mail\"/>" + "<http:header name=\"To\" value=\"Ned " + "Freed &lt;ned@innosoft.com&gt;\"/>" + "<http:header name=\"Content-Type\" value=\"multipart/mixed;" + "boundary=&quot;simple boundary&quot;\"/>" + "<http:header name=\"MIME-version\" value=\"1.0\"/>" + "<http:header name=\"From\" value=\"Nathaniel Borenstein " + "&lt;nsb@bellcore.com&gt;\"/>" + "<http:multipart media-type=\"multipart/mixed\" " + "boundary=\"simple boundary\">" + "<part>" + "<http:body media-type=\"text/plain\"/>" + "</part>" + "<part>" + "<http:header name=\"Content-type\" value=\"text/plain; " + "charset=us-ascii\"/>" + "<http:body media-type=\"text/plain; charset=us-ascii\"/>" + "</part>" + "</http:multipart>" + "</http:response>"); final IO io = new IOContent(reqItem); final Parser reqParser = Parser.xmlParser(io, context.prop, ""); final DBNode dbNode = new DBNode(reqParser, context.prop); resultIter.add(dbNode.children().next()); resultIter.add(Str.get(token("This is implicitly typed plain ASCII text.\n" + "It does NOT end with a linebreak.\n"))); resultIter.add(Str.get(token("This is explicitly typed plain ASCII text.\n" + "It DOES end with a linebreak.\n\n"))); // Compare response with expected result assertTrue(FNSimple.deep(null, resultIter, i)); } /** * Checks the response to an HTTP request. 
* @param c command * @param expStatus expected status * @param itemsCount expected number of items * @throws QueryException query exception */ private void checkResponse(final Command c, final int expStatus, final int itemsCount) throws QueryException { assertTrue(c.result() instanceof ValueIter); final ValueIter res = (ValueIter) c.result(); assertEquals(itemsCount, res.size()); assertTrue(res.get(0) instanceof FElem); final FElem response = (FElem) res.get(0); assertNotNull(response.attributes()); final NodeIter resAttr = response.attributes(); for(ANode attr; (attr = resAttr.next()) != null;) { if(eq(attr.nname(), STATUS)) { assertTrue(eq(attr.atom(), token(expStatus))); } } } } final class FakeHttpConnection extends HttpURLConnection { /** Request headers. */ Map<String, List<String>> headers; /** Content-type. */ String contentType; /** Content. */ byte[] content; /** Connection output stream. */ ByteArrayOutputStream out; /** * Constructor. * @param u uri */ FakeHttpConnection(final URL u) { super(u); out = new ByteArrayOutputStream(); headers = new HashMap<String, List<String>>(); } @Override public ByteArrayInputStream getInputStream() { return new ByteArrayInputStream(content); } @Override public String getContentType() { return contentType; } @Override public int getResponseCode() { return 200; } @Override public String getResponseMessage() { return "OK"; } @Override public Map<String, List<String>> getHeaderFields() { return headers; } @Override public String getHeaderField(final String field) { final List<String> values = headers.get(field); final StringBuilder sb = new StringBuilder(); final Iterator<String> i = values.iterator(); while(i.hasNext()) { sb.append(i.next()).append(';'); } return sb.substring(0, sb.length() - 1); } @Override public OutputStream getOutputStream() { return out; } @Override public void disconnect() { } @Override public boolean usingProxy() { return false; } @Override public void connect() { } }
package com.google.refine.importing;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.zip.GZIPInputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.ProgressListener;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.commons.fileupload.util.Streams;
import org.apache.commons.io.FileCleaningTracker;
import org.apache.tools.bzip2.CBZip2InputStream;
import org.apache.tools.tar.TarEntry;
import org.apache.tools.tar.TarInputStream;
import org.json.JSONArray;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.ibm.icu.text.NumberFormat;

import com.google.refine.ProjectManager;
import com.google.refine.ProjectMetadata;
import com.google.refine.RefineServlet;
import com.google.refine.importing.ImportingManager.Format;
import com.google.refine.importing.UrlRewriter.Result;
import com.google.refine.model.Project;
import com.google.refine.util.JSONUtilities;

/**
 * Utilities driving the importing workflow: retrieving raw data from uploads,
 * the clipboard or URL downloads, exploding archives, guessing and ranking
 * data formats, and finally creating a project from the selected files.
 */
public class ImportingUtilities {
    final static protected Logger logger = LoggerFactory.getLogger("importing-utilities");

    /** Callback used to report retrieval progress and poll for cancellation. */
    static public interface Progress {
        public void setProgress(String message, int percent);
        public boolean isCanceled();
    }

    /**
     * Retrieves the raw data posted by the client, records the retrieval
     * outcome in {@code config}, auto-selects files and ranks candidate
     * formats so the UI can present parsing options.
     *
     * On any retrieval failure the job state is set to "error" and the
     * method returns without throwing.
     */
    static public void loadDataAndPrepareJob(
        HttpServletRequest request,
        HttpServletResponse response,
        Properties parameters,
        final ImportingJob job,
        JSONObject config) throws IOException, ServletException {

        JSONObject retrievalRecord = new JSONObject();
        JSONUtilities.safePut(config, "retrievalRecord", retrievalRecord);
        JSONUtilities.safePut(config, "state", "loading-raw-data");

        final JSONObject progress = new JSONObject();
        JSONUtilities.safePut(config, "progress", progress);
        try {
            ImportingUtilities.retrieveContentFromPostRequest(
                request,
                parameters,
                job.getRawDataDir(),
                retrievalRecord,
                new Progress() {
                    @Override
                    public void setProgress(String message, int percent) {
                        if (message != null) {
                            JSONUtilities.safePut(progress, "message", message);
                        }
                        JSONUtilities.safePut(progress, "percent", percent);
                    }
                    @Override
                    public boolean isCanceled() {
                        return job.canceled;
                    }
                }
            );
        } catch (Exception e) {
            JSONUtilities.safePut(config, "state", "error");
            JSONUtilities.safePut(config, "error", "Error uploading data");
            JSONUtilities.safePut(config, "errorDetails", e.getLocalizedMessage());
            return;
        }

        JSONArray fileSelectionIndexes = new JSONArray();
        JSONUtilities.safePut(config, "fileSelection", fileSelectionIndexes);

        String bestFormat = ImportingUtilities.autoSelectFiles(job, retrievalRecord, fileSelectionIndexes);
        bestFormat = ImportingUtilities.guessBetterFormat(job, bestFormat);

        JSONArray rankedFormats = new JSONArray();
        JSONUtilities.safePut(config, "rankedFormats", rankedFormats);
        ImportingUtilities.rankFormats(job, bestFormat, rankedFormats);

        JSONUtilities.safePut(config, "state", "ready");
        JSONUtilities.safePut(config, "hasData", true);
        config.remove("progress");
    }

    /**
     * Re-ranks candidate formats after the user changes which files are
     * selected for import.
     */
    static public void updateJobWithNewFileSelection(ImportingJob job, JSONArray fileSelectionArray) {
        JSONUtilities.safePut(job.config, "fileSelection", fileSelectionArray);

        String bestFormat = ImportingUtilities.getCommonFormatForSelectedFiles(job, fileSelectionArray);
        bestFormat = ImportingUtilities.guessBetterFormat(job, bestFormat);

        JSONArray rankedFormats = new JSONArray();
        JSONUtilities.safePut(job.config, "rankedFormats", rankedFormats);
        ImportingUtilities.rankFormats(job, bestFormat, rankedFormats);
    }

    /**
     * Parses the multipart POST request and stores every piece of raw data
     * (clipboard text, URL downloads, uploaded files) under
     * {@code rawDataDir}, appending one record per saved file to
     * {@code retrievalRecord}'s "files" array.
     */
    static public void retrieveContentFromPostRequest(
        HttpServletRequest request,
        Properties parameters,
        File rawDataDir,
        JSONObject retrievalRecord,
        final Progress progress
    ) throws Exception {
        JSONArray fileRecords = new JSONArray();
        JSONUtilities.safePut(retrievalRecord, "files", fileRecords);

        int clipboardCount = 0;
        int uploadCount = 0;
        int downloadCount = 0;
        int archiveCount = 0;

        // This tracks the total progress, which involves uploading data from the client
        // as well as downloading data from URLs.
        final SavingUpdate update = new SavingUpdate() {
            @Override
            public void savedMore() {
                progress.setProgress(null, calculateProgressPercent(totalExpectedSize, totalRetrievedSize));
            }
            @Override
            public boolean isCanceled() {
                return progress.isCanceled();
            }
        };

        DiskFileItemFactory fileItemFactory = new DiskFileItemFactory();
        fileItemFactory.setFileCleaningTracker(new FileCleaningTracker());

        ServletFileUpload upload = new ServletFileUpload(fileItemFactory);
        upload.setProgressListener(new ProgressListener() {
            boolean setContentLength = false;
            long lastBytesRead = 0;

            @Override
            public void update(long bytesRead, long contentLength, int itemCount) {
                if (!setContentLength) {
                    // Only try to set the content length if we really know it.
                    if (contentLength >= 0) {
                        update.totalExpectedSize += contentLength;
                        setContentLength = true;
                    }
                }
                if (setContentLength) {
                    update.totalRetrievedSize += (bytesRead - lastBytesRead);
                    lastBytesRead = bytesRead;
                    update.savedMore();
                }
            }
        });

        progress.setProgress("Uploading data ...", -1);
        parts: for (Object obj : upload.parseRequest(request)) {
            if (progress.isCanceled()) {
                break;
            }

            FileItem fileItem = (FileItem) obj;
            InputStream stream = fileItem.getInputStream();
            String name = fileItem.getFieldName().toLowerCase();
            if (fileItem.isFormField()) {
                if (name.equals("clipboard")) {
                    // pasted clipboard text is stored as a plain text file
                    File file = allocateFile(rawDataDir, "clipboard.txt");

                    JSONObject fileRecord = new JSONObject();
                    JSONUtilities.safePut(fileRecord, "origin", "clipboard");
                    JSONUtilities.safePut(fileRecord, "declaredEncoding", request.getCharacterEncoding());
                    JSONUtilities.safePut(fileRecord, "declaredMimeType", (String) null);
                    JSONUtilities.safePut(fileRecord, "format", "text");
                    JSONUtilities.safePut(fileRecord, "fileName", "(clipboard)");
                    JSONUtilities.safePut(fileRecord, "location", getRelativePath(file, rawDataDir));

                    progress.setProgress("Uploading pasted clipboard text",
                        calculateProgressPercent(update.totalExpectedSize, update.totalRetrievedSize));

                    JSONUtilities.safePut(fileRecord, "size", saveStreamToFile(stream, file, null));
                    clipboardCount++;

                    JSONUtilities.append(fileRecords, fileRecord);
                } else if (name.equals("download")) {
                    String urlString = Streams.asString(stream);
                    URL url = new URL(urlString);

                    JSONObject fileRecord = new JSONObject();
                    JSONUtilities.safePut(fileRecord, "origin", "download");
                    JSONUtilities.safePut(fileRecord, "url", urlString);

                    // let registered rewriters redirect or virtualize the URL;
                    // a rewriter may declare the data should not be downloaded
                    for (UrlRewriter rewriter : ImportingManager.urlRewriters) {
                        Result result = rewriter.rewrite(urlString);
                        if (result != null) {
                            urlString = result.rewrittenUrl;
                            url = new URL(urlString);

                            JSONUtilities.safePut(fileRecord, "url", urlString);
                            JSONUtilities.safePut(fileRecord, "format", result.format);
                            if (!result.download) {
                                downloadCount++;
                                JSONUtilities.append(fileRecords, fileRecord);
                                continue parts;
                            }
                        }
                    }

                    URLConnection urlConnection = url.openConnection();
                    urlConnection.setConnectTimeout(5000);
                    if (urlConnection instanceof HttpURLConnection) {
                        HttpURLConnection httpConnection = (HttpURLConnection) urlConnection;
                        RefineServlet.setUserAgent(httpConnection);
                    }
                    urlConnection.connect();

                    InputStream stream2 = urlConnection.getInputStream();
                    try {
                        File file = allocateFile(rawDataDir, url.getFile());

                        int contentLength = urlConnection.getContentLength();
                        if (contentLength > 0) {
                            update.totalExpectedSize += contentLength;
                        }

                        JSONUtilities.safePut(fileRecord, "declaredEncoding", urlConnection.getContentEncoding());
                        JSONUtilities.safePut(fileRecord, "declaredMimeType", urlConnection.getContentType());
                        JSONUtilities.safePut(fileRecord, "fileName", file.getName());
                        JSONUtilities.safePut(fileRecord, "location", getRelativePath(file, rawDataDir));

                        progress.setProgress("Downloading " + urlString,
                            calculateProgressPercent(update.totalExpectedSize, update.totalRetrievedSize));

                        long actualLength = saveStreamToFile(stream2, file, update);
                        JSONUtilities.safePut(fileRecord, "size", actualLength);
                        if (actualLength == 0) {
                            throw new Exception("No content found in " + urlString);
                        } else if (contentLength >= 0) {
                            // reconcile the estimate with what was actually received
                            update.totalExpectedSize += (actualLength - contentLength);
                        } else {
                            update.totalExpectedSize += actualLength;
                        }
                        progress.setProgress("Saving " + urlString + " locally",
                            calculateProgressPercent(update.totalExpectedSize, update.totalRetrievedSize));

                        if (postProcessRetrievedFile(file, fileRecord, fileRecords, progress)) {
                            archiveCount++;
                        }
                        downloadCount++;
                    } finally {
                        stream2.close();
                    }
                } else {
                    String value = Streams.asString(stream);
                    parameters.put(name, value);
                    // TODO: We really want to store this on the request so it's available for everyone
                    // request.getParameterMap().put(name, value);
                }
            } else { // is file content
                String fileName = fileItem.getName();
                if (fileName.length() > 0) {
                    long fileSize = fileItem.getSize();

                    File file = allocateFile(rawDataDir, fileName);

                    JSONObject fileRecord = new JSONObject();
                    JSONUtilities.safePut(fileRecord, "origin", "upload");
                    JSONUtilities.safePut(fileRecord, "declaredEncoding", request.getCharacterEncoding());
                    JSONUtilities.safePut(fileRecord, "declaredMimeType", fileItem.getContentType());
                    JSONUtilities.safePut(fileRecord, "fileName", fileName);
                    JSONUtilities.safePut(fileRecord, "location", getRelativePath(file, rawDataDir));

                    progress.setProgress(
                        "Saving file " + fileName + " locally (" + formatBytes(fileSize) + " bytes)",
                        calculateProgressPercent(update.totalExpectedSize, update.totalRetrievedSize));

                    JSONUtilities.safePut(fileRecord, "size", saveStreamToFile(stream, file, null));
                    if (postProcessRetrievedFile(file, fileRecord, fileRecords, progress)) {
                        archiveCount++;
                    }

                    uploadCount++;
                }
            }
        }

        JSONUtilities.safePut(retrievalRecord, "uploadCount", uploadCount);
        JSONUtilities.safePut(retrievalRecord, "downloadCount", downloadCount);
        JSONUtilities.safePut(retrievalRecord, "clipboardCount", clipboardCount);
        JSONUtilities.safePut(retrievalRecord, "archiveCount", archiveCount);
    }

    /** Returns {@code file}'s path relative to {@code dir}, without a leading separator. */
    static public String getRelativePath(File file, File dir) {
        String location = file.getAbsolutePath().substring(dir.getAbsolutePath().length());
        return (location.startsWith(File.separator)) ? location.substring(1) : location;
    }

    /**
     * Allocates a not-yet-existing file under {@code dir} based on
     * {@code name}, appending "-2", "-3", ... before the extension if the
     * name is already taken. Any query string ("?...") is stripped first.
     */
    static public File allocateFile(File dir, String name) {
        int q = name.indexOf('?');
        if (q > 0) {
            name = name.substring(0, q);
        }

        File file = new File(dir, name);

        int dot = name.indexOf('.');
        String prefix = dot < 0 ? name : name.substring(0, dot);
        String suffix = dot < 0 ? "" : name.substring(dot);
        int index = 2;
        while (file.exists()) {
            file = new File(dir, prefix + "-" + index++ + suffix);
        }

        file.getParentFile().mkdirs();

        return file;
    }

    /** Opens a reader for the file named by the record's "location", honoring its encoding. */
    static public Reader getFileReader(ImportingJob job, JSONObject fileRecord, String commonEncoding)
        throws FileNotFoundException {
        return getFileReader(getFile(job, JSONUtilities.getString(fileRecord, "location", "")), fileRecord, commonEncoding);
    }

    /** Opens a reader for {@code file}, honoring the record's encoding or {@code commonEncoding}. */
    static public Reader getFileReader(File file, JSONObject fileRecord, String commonEncoding)
        throws FileNotFoundException {
        return getReaderFromStream(new FileInputStream(file), fileRecord, commonEncoding);
    }

    /**
     * Wraps {@code inputStream} in a reader using the record's encoding,
     * falling back to {@code commonEncoding} and finally to the platform
     * default when the encoding is unknown or unsupported.
     */
    static public Reader getReaderFromStream(InputStream inputStream, JSONObject fileRecord, String commonEncoding) {
        String encoding = getEncoding(fileRecord);
        if (encoding == null) {
            encoding = commonEncoding;
        }
        if (encoding != null) {
            try {
                return new InputStreamReader(inputStream, encoding);
            } catch (UnsupportedEncodingException e) {
                // Ignore and fall through
            }
        }
        return new InputStreamReader(inputStream);
    }

    /** Resolves the file referenced by the record's "location" within the job's raw-data dir. */
    static public File getFile(ImportingJob job, JSONObject fileRecord) {
        return getFile(job, JSONUtilities.getString(fileRecord, "location", ""));
    }

    /** Resolves {@code location} within the job's raw-data dir. */
    static public File getFile(ImportingJob job, String location) {
        return new File(job.getRawDataDir(), location);
    }

    /** Returns a human-readable source for a file record: its URL, file name, or "unknown". */
    static public String getFileSource(JSONObject fileRecord) {
        return JSONUtilities.getString(
            fileRecord,
            "url",
            JSONUtilities.getString(fileRecord, "fileName", "unknown")
        );
    }

    /** Accumulates expected/retrieved byte counts and forwards progress callbacks. */
    static private abstract class SavingUpdate {
        public long totalExpectedSize = 0;
        public long totalRetrievedSize = 0;
        abstract public void savedMore();
        abstract public boolean isCanceled();
    }

    /**
     * Copies {@code stream} to {@code file} until EOF or cancellation,
     * optionally reporting progress through {@code update}.
     * @return the number of bytes written
     */
    static public long saveStreamToFile(InputStream stream, File file, SavingUpdate update) throws IOException {
        long length = 0;
        FileOutputStream fos = new FileOutputStream(file);
        try {
            byte[] bytes = new byte[4096];
            int c;
            while ((update == null || !update.isCanceled()) && (c = stream.read(bytes)) > 0) {
                fos.write(bytes, 0, c);
                length += c;

                if (update != null) {
                    update.totalRetrievedSize += c;
                    update.savedMore();
                }
            }
            return length;
        } finally {
            fos.close();
        }
    }

    /**
     * Post-processes a retrieved file: explodes archives into individual
     * file records (returning true and deleting the archive), transparently
     * uncompresses .gz/.bz2 files, and records the file's guessed format.
     * @return true if the file was an archive that was exploded
     */
    static public boolean postProcessRetrievedFile(
        File file, JSONObject fileRecord, JSONArray fileRecords, final Progress progress) {

        String mimeType = JSONUtilities.getString(fileRecord, "declaredMimeType", null);
        File rawDataDir = file.getParentFile();

        InputStream archiveIS = tryOpenAsArchive(file, mimeType);
        if (archiveIS != null) {
            try {
                if (explodeArchive(rawDataDir, archiveIS, fileRecord, fileRecords, progress)) {
                    file.delete();
                    return true;
                }
            } finally {
                try {
                    archiveIS.close();
                } catch (IOException e) {
                    // TODO: what to do?
                }
            }
        }

        InputStream uncompressedIS = tryOpenAsCompressedFile(file, mimeType);
        if (uncompressedIS != null) {
            try {
                File file2 = uncompressFile(rawDataDir, uncompressedIS, fileRecord, progress);

                file.delete();
                file = file2;
            } catch (IOException e) {
                // TODO: what to do?
                e.printStackTrace();
            } finally {
                try {
                    // BUG FIX: previously closed archiveIS here, which is null
                    // when the file is not an archive (NPE) and leaked the
                    // compressed-file stream.
                    uncompressedIS.close();
                } catch (IOException e) {
                    // TODO: what to do?
                }
            }
        }

        postProcessSingleRetrievedFile(file, fileRecord);
        JSONUtilities.append(fileRecords, fileRecord);

        return false;
    }

    /** Fills in the record's "format" from its file name and declared MIME type, if absent. */
    static public void postProcessSingleRetrievedFile(File file, JSONObject fileRecord) {
        if (!fileRecord.has("format")) {
            JSONUtilities.safePut(fileRecord, "format",
                ImportingManager.getFormat(
                    file.getName(),
                    JSONUtilities.getString(fileRecord, "declaredMimeType", null)));
        }
    }

    /**
     * Opens {@code file} as a tar/zip archive stream based on its extension,
     * or returns null if it is not a recognized archive.
     */
    static public InputStream tryOpenAsArchive(File file, String mimeType) {
        String fileName = file.getName();
        try {
            if (fileName.endsWith(".tar.gz") || fileName.endsWith(".tgz")) {
                return new TarInputStream(new GZIPInputStream(new FileInputStream(file)));
            } else if (fileName.endsWith(".tar.bz2")) {
                return new TarInputStream(new CBZip2InputStream(new FileInputStream(file)));
            } else if (fileName.endsWith(".tar")) {
                return new TarInputStream(new FileInputStream(file));
            } else if (fileName.endsWith(".zip")) {
                return new ZipInputStream(new FileInputStream(file));
            }
        } catch (IOException e) {
            // fall through: treat as a non-archive file
        }
        return null;
    }

    /**
     * Extracts every regular entry of a tar or zip archive into
     * {@code rawDataDir}, appending one file record per extracted entry.
     * @return true if {@code archiveIS} was a recognized archive stream
     */
    static public boolean explodeArchive(
        File rawDataDir,
        InputStream archiveIS,
        JSONObject archiveFileRecord,
        JSONArray fileRecords,
        final Progress progress
    ) {
        if (archiveIS instanceof TarInputStream) {
            TarInputStream tis = (TarInputStream) archiveIS;
            try {
                TarEntry te;
                while (!progress.isCanceled() && (te = tis.getNextEntry()) != null) {
                    if (!te.isDirectory()) {
                        String fileName2 = te.getName();
                        File file2 = allocateFile(rawDataDir, fileName2);

                        progress.setProgress("Extracting " + fileName2, -1);

                        JSONObject fileRecord2 = new JSONObject();
                        JSONUtilities.safePut(fileRecord2, "origin", JSONUtilities.getString(archiveFileRecord, "origin", null));
                        JSONUtilities.safePut(fileRecord2, "declaredEncoding", (String) null);
                        JSONUtilities.safePut(fileRecord2, "declaredMimeType", (String) null);
                        JSONUtilities.safePut(fileRecord2, "fileName", fileName2);
                        JSONUtilities.safePut(fileRecord2, "archiveFileName", JSONUtilities.getString(archiveFileRecord, "fileName", null));
                        JSONUtilities.safePut(fileRecord2, "location", getRelativePath(file2, rawDataDir));

                        JSONUtilities.safePut(fileRecord2, "size", saveStreamToFile(tis, file2, null));
                        postProcessSingleRetrievedFile(file2, fileRecord2);

                        JSONUtilities.append(fileRecords, fileRecord2);
                    }
                }
            } catch (IOException e) {
                // TODO: what to do?
                e.printStackTrace();
            }
            return true;
        } else if (archiveIS instanceof ZipInputStream) {
            ZipInputStream zis = (ZipInputStream) archiveIS;
            try {
                ZipEntry ze;
                while (!progress.isCanceled() && (ze = zis.getNextEntry()) != null) {
                    if (!ze.isDirectory()) {
                        String fileName2 = ze.getName();
                        File file2 = allocateFile(rawDataDir, fileName2);

                        progress.setProgress("Extracting " + fileName2, -1);

                        JSONObject fileRecord2 = new JSONObject();
                        JSONUtilities.safePut(fileRecord2, "origin", JSONUtilities.getString(archiveFileRecord, "origin", null));
                        JSONUtilities.safePut(fileRecord2, "declaredEncoding", (String) null);
                        JSONUtilities.safePut(fileRecord2, "declaredMimeType", (String) null);
                        JSONUtilities.safePut(fileRecord2, "fileName", fileName2);
                        JSONUtilities.safePut(fileRecord2, "archiveFileName", JSONUtilities.getString(archiveFileRecord, "fileName", null));
                        JSONUtilities.safePut(fileRecord2, "location", getRelativePath(file2, rawDataDir));

                        JSONUtilities.safePut(fileRecord2, "size", saveStreamToFile(zis, file2, null));
                        postProcessSingleRetrievedFile(file2, fileRecord2);

                        JSONUtilities.append(fileRecords, fileRecord2);
                    }
                }
            } catch (IOException e) {
                // TODO: what to do?
                e.printStackTrace();
            }
            return true;
        }
        return false;
    }

    /**
     * Opens {@code file} as a gzip/bzip2 decompressing stream based on its
     * extension, or returns null if it is not a recognized compressed file.
     */
    static public InputStream tryOpenAsCompressedFile(File file, String mimeType) {
        String fileName = file.getName();
        try {
            if (fileName.endsWith(".gz")) {
                return new GZIPInputStream(new FileInputStream(file));
            } else if (fileName.endsWith(".bz2")) {
                return new CBZip2InputStream(new FileInputStream(file));
            }
        } catch (IOException e) {
            // fall through: treat as a plain file
        }
        return null;
    }

    /**
     * Writes the already-decompressing {@code uncompressedIS} to a fresh
     * file, updating the record's location and clearing its declared
     * encoding/MIME type (which described the compressed wrapper).
     * @return the uncompressed file
     */
    static public File uncompressFile(
        File rawDataDir,
        InputStream uncompressedIS,
        JSONObject fileRecord,
        final Progress progress
    ) throws IOException {
        String fileName = JSONUtilities.getString(fileRecord, "fileName", "unknown");
        File file2 = allocateFile(rawDataDir, fileName);

        progress.setProgress("Uncompressing " + fileName, -1);

        saveStreamToFile(uncompressedIS, file2, null);

        JSONUtilities.safePut(fileRecord, "declaredEncoding", (String) null);
        JSONUtilities.safePut(fileRecord, "declaredMimeType", (String) null);
        JSONUtilities.safePut(fileRecord, "location", getRelativePath(file2, rawDataDir));

        return file2;
    }

    /** Returns a 0-100 percentage, or -1 when the total is not yet known. */
    static private int calculateProgressPercent(long totalExpectedSize, long totalRetrievedSize) {
        return totalExpectedSize == 0 ? -1 : (int) (totalRetrievedSize * 100 / totalExpectedSize);
    }

    /** Formats a byte count with locale-aware grouping separators. */
    static private String formatBytes(long bytes) {
        return NumberFormat.getIntegerInstance().format(bytes);
    }

    /** Returns the record's "encoding", falling back to "declaredEncoding". */
    static public String getEncoding(JSONObject fileRecord) {
        String encoding = JSONUtilities.getString(fileRecord, "encoding", null);
        if (encoding == null || encoding.isEmpty()) {
            encoding = JSONUtilities.getString(fileRecord, "declaredEncoding", null);
        }
        return encoding;
    }

    /**
     * Determines the most common format among retrieved files and fills
     * {@code fileSelectionIndexes} with the files to import by default.
     * @return the best (most common) format
     */
    static public String autoSelectFiles(ImportingJob job, JSONObject retrievalRecord, JSONArray fileSelectionIndexes) {
        final Map<String, Integer> formatToCount = new HashMap<String, Integer>();
        List<String> formats = new ArrayList<String>();

        JSONArray fileRecords = JSONUtilities.getArray(retrievalRecord, "files");
        int count = fileRecords.length();
        for (int i = 0; i < count; i++) {
            JSONObject fileRecord = JSONUtilities.getObjectElement(fileRecords, i);
            String format = JSONUtilities.getString(fileRecord, "format", null);
            if (format != null) {
                if (formatToCount.containsKey(format)) {
                    formatToCount.put(format, formatToCount.get(format) + 1);
                } else {
                    formatToCount.put(format, 1);
                    formats.add(format);
                }
            }
        }
        Collections.sort(formats, new Comparator<String>() {
            @Override
            public int compare(String o1, String o2) {
                return formatToCount.get(o2) - formatToCount.get(o1);
            }
        });

        // Default to text/line-based to avoid parsing as binary/excel.
        String bestFormat = formats.size() > 0 ? formats.get(0) : "text/line-based";
        if (JSONUtilities.getInt(retrievalRecord, "archiveCount", 0) == 0) {
            // If there's no archive, then select everything
            for (int i = 0; i < count; i++) {
                JSONUtilities.append(fileSelectionIndexes, i);
            }
        } else {
            // Otherwise, select files matching the best format
            for (int i = 0; i < count; i++) {
                JSONObject fileRecord = JSONUtilities.getObjectElement(fileRecords, i);
                String format = JSONUtilities.getString(fileRecord, "format", null);
                if (format != null && format.equals(bestFormat)) {
                    JSONUtilities.append(fileSelectionIndexes, i);
                }
            }

            // If nothing matches the best format but we have some files,
            // then select them all
            if (fileSelectionIndexes.length() == 0 && count > 0) {
                for (int i = 0; i < count; i++) {
                    JSONUtilities.append(fileSelectionIndexes, i);
                }
            }
        }
        return bestFormat;
    }

    /**
     * Returns the most common format among the currently selected files,
     * or null if none of them has a known format.
     */
    static public String getCommonFormatForSelectedFiles(ImportingJob job, JSONArray fileSelectionIndexes) {
        JSONObject retrievalRecord = JSONUtilities.getObject(job.config, "retrievalRecord");

        final Map<String, Integer> formatToCount = new HashMap<String, Integer>();
        List<String> formats = new ArrayList<String>();

        JSONArray fileRecords = JSONUtilities.getArray(retrievalRecord, "files");
        int count = fileSelectionIndexes.length();
        for (int i = 0; i < count; i++) {
            int index = JSONUtilities.getIntElement(fileSelectionIndexes, i, -1);
            if (index >= 0 && index < fileRecords.length()) {
                JSONObject fileRecord = JSONUtilities.getObjectElement(fileRecords, index);
                String format = JSONUtilities.getString(fileRecord, "format", null);
                if (format != null) {
                    if (formatToCount.containsKey(format)) {
                        formatToCount.put(format, formatToCount.get(format) + 1);
                    } else {
                        formatToCount.put(format, 1);
                        formats.add(format);
                    }
                }
            }
        }
        Collections.sort(formats, new Comparator<String>() {
            @Override
            public int compare(String o1, String o2) {
                return formatToCount.get(o2) - formatToCount.get(o1);
            }
        });

        return formats.size() > 0 ? formats.get(0) : null;
    }

    /** Refines {@code bestFormat} by letting registered guessers inspect the first file. */
    static String guessBetterFormat(ImportingJob job, String bestFormat) {
        JSONObject retrievalRecord = JSONUtilities.getObject(job.config, "retrievalRecord");
        return retrievalRecord != null ? guessBetterFormat(job, retrievalRecord, bestFormat) : bestFormat;
    }

    /** Overload taking an explicit retrieval record. */
    static String guessBetterFormat(ImportingJob job, JSONObject retrievalRecord, String bestFormat) {
        JSONArray fileRecords = JSONUtilities.getArray(retrievalRecord, "files");
        return fileRecords != null ? guessBetterFormat(job, fileRecords, bestFormat) : bestFormat;
    }

    /**
     * Repeatedly applies format guessers to the first file until the
     * guessed format stops changing.
     */
    static String guessBetterFormat(ImportingJob job, JSONArray fileRecords, String bestFormat) {
        if (bestFormat != null && fileRecords != null && fileRecords.length() > 0) {
            JSONObject firstFileRecord = JSONUtilities.getObjectElement(fileRecords, 0);
            String encoding = getEncoding(firstFileRecord);
            String location = JSONUtilities.getString(firstFileRecord, "location", null);
            if (location != null) {
                File file = new File(job.getRawDataDir(), location);

                while (true) {
                    String betterFormat = null;

                    List<FormatGuesser> guessers = ImportingManager.formatToGuessers.get(bestFormat);
                    if (guessers != null) {
                        for (FormatGuesser guesser : guessers) {
                            betterFormat = guesser.guess(file, encoding, bestFormat);
                            if (betterFormat != null) {
                                break;
                            }
                        }
                    }
                    if (betterFormat != null && !betterFormat.equals(bestFormat)) {
                        bestFormat = betterFormat;
                    } else {
                        break;
                    }
                }
            }
        }
        return bestFormat;
    }

    /**
     * Fills {@code rankedFormats} with all usable formats, ordered so that
     * {@code bestFormat} comes first and formats sharing more leading
     * "a/b/c" segments with it rank higher.
     */
    static void rankFormats(ImportingJob job, final String bestFormat, JSONArray rankedFormats) {
        final Map<String, String[]> formatToSegments = new HashMap<String, String[]>();

        boolean download = bestFormat == null ? true :
            ImportingManager.formatToRecord.get(bestFormat).download;

        List<String> formats = new ArrayList<String>(ImportingManager.formatToRecord.keySet().size());
        for (String format : ImportingManager.formatToRecord.keySet()) {
            Format record = ImportingManager.formatToRecord.get(format);
            if (record.uiClass != null && record.parser != null && record.download == download) {
                formats.add(format);
                formatToSegments.put(format, format.split("/"));
            }
        }

        if (bestFormat == null) {
            Collections.sort(formats);
        } else {
            Collections.sort(formats, new Comparator<String>() {
                @Override
                public int compare(String format1, String format2) {
                    if (format1.equals(bestFormat)) {
                        return -1;
                    } else if (format2.equals(bestFormat)) {
                        return 1;
                    } else {
                        return compareBySegments(format1, format2);
                    }
                }

                int compareBySegments(String format1, String format2) {
                    int c = commonSegments(format2) - commonSegments(format1);
                    return c != 0 ? c : format1.compareTo(format2);
                }

                int commonSegments(String format) {
                    String[] bestSegments = formatToSegments.get(bestFormat);
                    String[] segments = formatToSegments.get(format);
                    if (bestSegments == null || segments == null) {
                        return 0;
                    } else {
                        int i;
                        for (i = 0; i < bestSegments.length && i < segments.length; i++) {
                            if (!bestSegments[i].equals(segments[i])) {
                                break;
                            }
                        }
                        return i;
                    }
                }
            });
        }

        for (String format : formats) {
            JSONUtilities.append(rankedFormats, format);
        }
    }

    /** Returns the file records the user selected, resolved against the retrieval record. */
    static public List<JSONObject> getSelectedFileRecords(ImportingJob job) {
        List<JSONObject> results = new ArrayList<JSONObject>();

        JSONObject retrievalRecord = JSONUtilities.getObject(job.config, "retrievalRecord");
        if (retrievalRecord != null) {
            JSONArray fileRecordArray = JSONUtilities.getArray(retrievalRecord, "files");
            if (fileRecordArray != null) {
                JSONArray fileSelectionArray = JSONUtilities.getArray(job.config, "fileSelection");
                if (fileSelectionArray != null) {
                    for (int i = 0; i < fileSelectionArray.length(); i++) {
                        int index = JSONUtilities.getIntElement(fileSelectionArray, i, -1);
                        if (index >= 0 && index < fileRecordArray.length()) {
                            results.add(JSONUtilities.getObjectElement(fileRecordArray, index));
                        }
                    }
                }
            }
        }
        return results;
    }

    /** Parses up to 100 rows of the selected files into the job's preview project. */
    static public void previewParse(ImportingJob job, String format, JSONObject optionObj, List<Exception> exceptions) {
        Format record = ImportingManager.formatToRecord.get(format);
        if (record == null || record.parser == null) {
            // TODO: what to do?
            return;
        }

        job.prepareNewProject();

        record.parser.parse(
            job.project,
            job.metadata,
            job,
            getSelectedFileRecords(job),
            format,
            100,
            optionObj,
            exceptions
        );
        job.project.update(); // update all internal models, indexes, caches, etc.
    }

    /**
     * Creates a project from the selected files, either synchronously or on
     * a background thread.
     * @return the new project's id, or -1 if the format has no parser
     */
    static public long createProject(
        final ImportingJob job,
        final String format,
        final JSONObject optionObj,
        final List<Exception> exceptions,
        boolean synchronous) {
        final Format record = ImportingManager.formatToRecord.get(format);
        if (record == null || record.parser == null) {
            // TODO: what to do?
            return -1;
        }

        JSONUtilities.safePut(job.config, "state", "creating-project");

        final Project project = new Project();
        if (synchronous) {
            createProjectSynchronously(
                job, format, optionObj, exceptions, record, project);
        } else {
            new Thread() {
                @Override
                public void run() {
                    createProjectSynchronously(
                        job, format, optionObj, exceptions, record, project);
                }
            }.start();
        }
        return project.id;
    }

    /** Parses all selected files into {@code project} and registers it, updating job state. */
    static private void createProjectSynchronously(
        final ImportingJob job,
        final String format,
        final JSONObject optionObj,
        final List<Exception> exceptions,
        final Format record,
        final Project project
    ) {
        ProjectMetadata pm = new ProjectMetadata();
        pm.setName(JSONUtilities.getString(optionObj, "projectName", "Untitled"));

        String encoding = JSONUtilities.getString(optionObj, "encoding", "UTF-8");
        if ("".equals(encoding)) {
            // encoding can be present, but empty, which won't trigger JSONUtilities default processing
            encoding = "UTF-8";
        }
        pm.setEncoding(encoding);

        record.parser.parse(
            project,
            pm,
            job,
            getSelectedFileRecords(job),
            format,
            -1,
            optionObj,
            exceptions
        );

        if (!job.canceled) {
            if (exceptions.size() == 0) {
                project.update(); // update all internal models, indexes, caches, etc.

                ProjectManager.singleton.registerProject(project, pm);

                JSONUtilities.safePut(job.config, "projectID", project.id);
                JSONUtilities.safePut(job.config, "state", "created-project");
            } else {
                JSONUtilities.safePut(job.config, "state", "error");
                JSONUtilities.safePut(job.config, "errors",
                    DefaultImportingController.convertErrorsToJsonArray(exceptions));
            }
            job.touch();
            job.updating = false;
        }
    }

    /** Records project-creation progress (message, percent, JVM memory) on the job config. */
    static public void setCreatingProjectProgress(ImportingJob job, String message, int percent) {
        JSONObject progress = JSONUtilities.getObject(job.config, "progress");
        if (progress == null) {
            progress = new JSONObject();
            JSONUtilities.safePut(job.config, "progress", progress);
        }
        JSONUtilities.safePut(progress, "message", message);
        JSONUtilities.safePut(progress, "percent", percent);
        JSONUtilities.safePut(progress, "memory", Runtime.getRuntime().totalMemory() / 1000000);
        JSONUtilities.safePut(progress, "maxmemory", Runtime.getRuntime().maxMemory() / 1000000);
    }
}
package org.owasp.esapi.util; import java.io.File; import java.io.IOException; import java.security.SecureRandom; import java.util.Random; /** * Utilities to help with tests that involve files or directories. */ public class FileTestUtils { private static final Class CLASS = FileTestUtils.class; private static final String CLASS_NAME = CLASS.getName(); private static final String DEFAULT_PREFIX = CLASS_NAME + '.'; private static final String DEFAULT_SUFFIX = ".tmp"; private static final Random rand; /* Rational for switching from SecureRandom to Random: This is used for generating filenames for temporary directories. Origionally this was using SecureRandom for this to make /tmp races harder. This is not necessary as mkdir always returns false if if the directory already exists. Additionally, SecureRandom for some reason on linux is appears to be reading from /dev/random instead of /dev/urandom. As such, the many calls for temporary directories in the unit tests quickly depleates the entropy pool causing unit test runs to block until more entropy is collected (this is why moving the mouse speeds up unit tests). */ static { SecureRandom secRand = new SecureRandom(); rand = new Random(secRand.nextLong()); } /** Private constructor as all methods are static. */ private FileTestUtils() { } /** * Convert a long to it's hex representation. Unlike * {@ Long#toHexString(long)} this always returns 16 digits. * @param l The long to convert. * @return l in hex. */ public static String toHexString(long l) { String initial; StringBuffer sb; initial = Long.toHexString(l); if(initial.length() == 16) return initial; sb = new StringBuffer(16); sb.append(initial); while(sb.length()<16) sb.insert(0,'0'); return sb.toString(); } /** * Create a temporary directory. * @param parent The parent directory for the temporary * directory. If this is null, the system property * "java.io.tmpdir" is used. * @param prefix The prefix for the directory's name. 
If this * is null, the full class name of this class is used. * @param suffix The suffix for the directory's name. If this * is null, ".tmp" is used. * @return The newly created temporary directory. * @throws IOException if directory creation fails * @throws SecurityException if {@link File#mkdir()} throws one. */ public static File createTmpDirectory(File parent, String prefix, String suffix) throws IOException { String name; File dir; if(prefix == null) prefix = DEFAULT_PREFIX; else if(!prefix.endsWith(".")) prefix += '.'; if(suffix == null) suffix = DEFAULT_SUFFIX; else if(!suffix.startsWith(".")) suffix = "." + suffix; if(parent == null) parent = new File(System.getProperty("java.io.tmpdir")); name = prefix + toHexString(rand.nextLong()) + suffix; dir = new File(parent, name); if(!dir.mkdir()) throw new IOException("Unable to create temporary directory " + dir); return dir.getCanonicalFile(); } /** * Create a temporary directory. This calls * {@link #createTmpDirectory(File, String, String)} with null * for parent and suffix. * @param prefix The prefix for the directory's name. If this * is null, the full class name of this class is used. * @return The newly created temporary directory. * @throws IOException if directory creation fails * @throws SecurityException if {@link File#mkdir()} throws one. */ public static File createTmpDirectory(String prefix) throws IOException { return createTmpDirectory(null, prefix, null); } /** * Create a temporary directory. This calls * {@link #createTmpDirectory(File, String, String)} with null * for all arguments. * @return The newly created temporary directory. * @throws IOException if directory creation fails * @throws SecurityException if {@link File#mkdir()} throws one. */ public static File createTmpDirectory() throws IOException { return createTmpDirectory(null,null,null); } /** * Checks that child is a directory and really a child of * parent. 
This verifies that the {@link File#getCanonicalFile() * canonical} child is actually a child of parent. This should * fail if the child is a symbolic link to another directory and * therefore should not be traversed in a recursive traversal of * a directory. * @param parent The supposed parent of the child * @param child The child to check * @return true if child is a directory and a direct decendant * of parent. * @throws IOException if {@link File#getCanonicalFile()} does * @throws NullPointerException if either parent or child * are null. */ public static boolean isChildSubDirectory(File parent, File child) throws IOException { File childsParent; if(child==null) throw new NullPointerException("child argument is null"); if(!child.isDirectory()) return false; if(parent==null) throw new NullPointerException("parent argument is null"); parent = parent.getCanonicalFile(); child = child.getCanonicalFile(); childsParent = child.getParentFile(); if(childsParent == null) return false; // sym link to /? childsParent = childsParent.getCanonicalFile(); // just in case... if(!parent.equals(childsParent)) return false; return true; } /** * Delete a file. Unlinke {@link File#delete()}, this throws an * exception if deletion fails. * @param file The file to delete * @throws IOException if file is not null, exists but delete * fails. */ public static void delete(File file) throws IOException { if(file==null || !file.exists()) return; if(!file.delete()) throw new IOException("Unable to delete file " + file.getAbsolutePath()); } /** * Recursively delete a file. If file is a directory, * subdirectories and files are also deleted. Care is taken to * not traverse symbolic links in this process. A null file or * a file that does not exist is considered to already been * deleted. * @param file The file or directory to be deleted * @throws IOException if the file, or a descendant, cannot * be deleted. * @throws SecurityException if {@link File#delete()} does. 
*/ public static void deleteRecursively(File file) throws IOException { File[] children; File child; if(file == null || !file.exists()) return; // already deleted? if(file.isDirectory()) { children = file.listFiles(); for(int i=0;i<children.length;i++) { child = children[i]; if(isChildSubDirectory(file,child)) deleteRecursively(child); else delete(child); } } // finally delete(file); } }
package com.healtrav.domain;

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.validation.constraints.NotNull;

import com.fasterxml.jackson.annotation.JsonIgnore;

/**
 * JPA entity holding a user's login credentials.
 *
 * The password is excluded from JSON serialization via {@code @JsonIgnore} so
 * it never leaks through REST responses.
 * NOTE(review): the password appears to be stored as a plain String field —
 * confirm it is hashed before persistence elsewhere in the stack.
 */
@Entity
public class User {

    /** Auto-generated primary key. */
    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private long id;

    /** Login name; required (bean-validation {@code @NotNull}). */
    @NotNull
    private String username;

    /** Credential; required, and hidden from JSON output. */
    @JsonIgnore
    @NotNull
    private String password;

    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }
}
package com.bloatit.web.html.pages;

import com.bloatit.common.Image;
import com.bloatit.common.PageIterable;
import com.bloatit.framework.Demand;
import com.bloatit.framework.managers.DemandManager;
import com.bloatit.web.annotations.PageComponent;
import com.bloatit.web.annotations.ParamContainer;
import com.bloatit.web.exceptions.RedirectException;
import com.bloatit.web.html.HtmlNode;
import com.bloatit.web.html.components.custom.HtmlPagedList;
import com.bloatit.web.html.components.custom.HtmlProgressBar;
import com.bloatit.web.html.components.standard.HtmlDiv;
import com.bloatit.web.html.components.standard.HtmlImage;
import com.bloatit.web.html.components.standard.HtmlLink;
import com.bloatit.web.html.components.standard.HtmlParagraph;
import com.bloatit.web.html.components.standard.HtmlRenderer;
import com.bloatit.web.html.components.standard.HtmlTitleBlock;
import com.bloatit.web.html.pages.master.Page;
import com.bloatit.web.utils.url.IdeaPageUrl;
import com.bloatit.web.utils.url.IdeasListUrl;

/**
 * Page listing every idea (Demand) known to the system, rendered as a paged
 * list of per-idea summary blocks.
 */
@ParamContainer("ideas/list")
public class IdeasList extends Page {

    /** Paged list component; annotated so the framework can bind page state. */
    @PageComponent
    HtmlPagedList<Demand> pagedIdeaList;

    /** URL this page was reached with; reused to build pagination links. */
    private final IdeasListUrl url;

    /**
     * Builds the page and generates its content immediately.
     *
     * @param url the parsed request URL for this page
     * @throws RedirectException if the framework decides to redirect
     */
    public IdeasList(final IdeasListUrl url) throws RedirectException {
        super(url);
        this.url = url;
        generateContent();
    }

    // Assembles the page title block and the paged list of all demands.
    private void generateContent() {
        final HtmlTitleBlock pageTitle = new HtmlTitleBlock(session.tr("Ideas list"), 1);
        final PageIterable<Demand> demandList = DemandManager.getDemands();
        final HtmlRenderer<Demand> demandItemRenderer = new IdeasListItem();

        // Clone the URL so pagination links can carry their own page parameter
        // without mutating the URL of the current request.
        final IdeasListUrl clonedUrl = url.clone();
        pagedIdeaList = new HtmlPagedList<Demand>(demandItemRenderer, demandList, clonedUrl, clonedUrl.getPagedIdeaListUrl());

        pageTitle.add(pagedIdeaList);
        add(pageTitle);
    }

    @Override
    public String getTitle() {
        return "View all ideas - search ideas";
    }

    @Override
    public boolean isStable() {
        return true;
    }

    @Override
    protected String getCustomCss() {
        return "ideas-list.css";
    }

    /**
     * Renders the summary block (karma, title, progress bar, icon) for a
     * single Demand in the list.
     */
    static class IdeasListItem implements HtmlRenderer<Demand> {

        // Demand currently being rendered; set by generate() before use.
        // NOTE(review): this makes the renderer stateful and not thread-safe;
        // presumably rendering is single-threaded — confirm.
        private Demand demand;

        @Override
        public HtmlNode generate(final Demand idea) {
            this.demand = idea;
            return generateContent();
        }

        private HtmlNode generateContent() {
            final HtmlDiv ideaBlock = new HtmlDiv("idea_summary");
            {
                final HtmlDiv ideaLinkBlock = new HtmlDiv();
                final HtmlLink link = new IdeaPageUrl(demand).getHtmlLink(ideaLinkBlock);
                link.setCssClass("idea_link");
                ideaBlock.add(link);

                // Left column: popularity ("karma") counter.
                final HtmlDiv leftBlock = new HtmlDiv("idea_summary_left");
                {
                    final HtmlDiv karmaBlock = new HtmlDiv("idea_karma");
                    karmaBlock.add(new HtmlParagraph("" + demand.getPopularity()));
                    leftBlock.add(karmaBlock);
                }
                ideaLinkBlock.add(leftBlock);

                // Center column: title and funding progress.
                final HtmlDiv centerBlock = new HtmlDiv("idea_summary_center");
                {
                    // NOTE(review): the heading below is hard-coded French text
                    // ("Correction de bug - VLC") while the demand's real title
                    // is only added as a paragraph underneath — this looks like
                    // leftover placeholder text; confirm and replace with the
                    // demand title.
                    final HtmlTitleBlock ideaTitle = new HtmlTitleBlock("Correction de bug - VLC", 3);
                    {
                        ideaTitle.add(new HtmlParagraph(demand.getTitle()));

                        final float progressValue = (float) Math.floor(demand.getProgression());
                        final HtmlProgressBar progressBar = new HtmlProgressBar(progressValue);
                        ideaTitle.add(progressBar);
                    }
                    centerBlock.add(ideaTitle);
                }
                ideaLinkBlock.add(centerBlock);

                // Right column: decorative idea icon.
                final HtmlDiv rightBlock = new HtmlDiv("idea_summary_right");
                {
                    rightBlock.add(new HtmlImage(new Image("/resources/img/idea.png", Image.ImageType.DISTANT)));
                }
                ideaLinkBlock.add(rightBlock);
            }
            return ideaBlock;
        }
    };
}
package org.takes.facets.auth;

import com.google.common.collect.ImmutableMap;
import java.io.IOException;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.takes.Response;
import org.takes.rq.RqFake;
import org.takes.rs.RsWithBody;
import org.takes.rs.RsWithStatus;
import org.takes.rs.RsWithType;

/**
 * Test case for {@link PsByFlag}.
 *
 * @author Yegor Bugayenko (yegor@teamed.io)
 * @version $Id$
 * @since 0.10
 */
public final class PsByFlagTest {
    /**
     * Testable PsByFlag object.
     * NOTE(review): static mutable state re-created in setUp() before every
     * test; a plain instance field would be the conventional JUnit idiom.
     */
    private static PsByFlag psbyflag;

    /**
     * Key.
     */
    private static final String KEY = "some-key";

    /**
     * HTTP request method.
     */
    private static final String METHOD = "GET";

    /**
     * Mocked Identity object used to test exit method.
     * NOTE(review): no Mockito runner/rule or MockitoAnnotations.initMocks()
     * is used anywhere in this class, so this field stays null at runtime;
     * exitTest() passes null into exit(). Presumably exit() ignores the
     * identity — confirm, or initialize the mock properly.
     */
    @Mock
    private static Identity identity;

    /**
     * Test set up.
     *
     * @throws Exception If some problem inside
     */
    @Before
    public void setUp() throws Exception {
        psbyflag = new PsByFlag(
            new PsByFlag.Pair(
                KEY, new PsFake(true)
            )
        );
    }

    /**
     * PsByFlag can skip if nothing found.
     *
     * @throws IOException If some problem inside
     */
    @Test
    public void skipsIfNothingFound() throws IOException {
        MatcherAssert.assertThat(
            psbyflag.enter(
                new RqFake(METHOD, "/?PsByFlag=x")
            ).hasNext(),
            Matchers.is(false)
        );
    }

    /**
     * PsByFlag finds flag and authenticates user.
     * NOTE(review): the literal "some-key" in the path duplicates the KEY
     * constant; the two must stay in sync.
     *
     * @throws IOException If some problem inside
     */
    @Test
    public void flagIsFoundUserAuthenticated() throws IOException {
        MatcherAssert.assertThat(
            psbyflag.enter(
                new RqFake(METHOD, "/?PsByFlag=some-key")
            ).next().urn(),
            Matchers.is("urn:test:1")
        );
    }

    /**
     * PsByFlag wraps response with authenticated user.
     * Expects exit() to return the response unchanged.
     *
     * @throws IOException If some problem inside
     */
    @Test
    public void exitTest() throws IOException {
        final Response response = new RsWithStatus(
            new RsWithType(
                new RsWithBody("<html>This is test response</html>"),
                "text/html"
            ),
            200
        );
        MatcherAssert.assertThat(
            new PsByFlag(
                new PsByFlag.Pair(
                    KEY, new PsFake(true)
                )
            ).exit(response, this.identity),
            Matchers.is(response)
        );
    }

    /**
     * Checks PsByFlag equality.
     *
     * @throws Exception If some problem inside
     */
    @Test
    public void equalsAndHashCodeEqualTest() throws Exception {
        MatcherAssert.assertThat(
            psbyflag.equals(
                new PsByFlag(
                    new PsByFlag.Pair(
                        KEY, new PsFake(true)
                    )
                )
            ),
            Matchers.is(true)
        );
    }

    /**
     * Checks PsByFlag inequality.
     *
     * @throws Exception If some problem inside
     */
    @Test
    public void equalsAndHashCodeNotEqualTest() throws Exception {
        MatcherAssert.assertThat(
            psbyflag.equals(
                new PsByFlag(
                    ImmutableMap.of(
                        "some-other-key",
                        (Pass) new PsFake(true)
                    )
                )
            ),
            Matchers.is(false)
        );
    }
}
package com.ecyrd.jspwiki.tags;

import java.io.IOException;

import org.apache.commons.lang.StringUtils;

import com.ecyrd.jspwiki.providers.ProviderException;

/**
 *  Includes body, if the request context matches. The "context" attribute is a
 *  '|'-separated list of context names; a name prefixed with '!' matches any
 *  context EXCEPT the named one. Matching is case-insensitive.
 *
 *  @author Janne Jalkanen
 *  @since 2.0
 */
public class CheckRequestContextTag
    extends WikiTagBase
{
    /** Raw attribute value, as set in the JSP. */
    private String m_context;

    /** The attribute split on '|'; empty until setContext() is called. */
    private String[] m_contextList = {};

    public String getContext()
    {
        return m_context;
    }

    public void setContext( String arg )
    {
        m_context = arg;
        m_contextList = StringUtils.split( arg, '|' );
    }

    /**
     *  Evaluates the body when any listed context (or negated context)
     *  matches the current request context; skips it otherwise.
     */
    public final int doWikiStartTag()
        throws IOException, ProviderException
    {
        // The request context is the same for every candidate, so fetch it
        // once instead of on each loop iteration (the original re-read it
        // per iteration).
        String ctx = m_wikiContext.getRequestContext();

        for( int i = 0; i < m_contextList.length; i++ )
        {
            String checkedCtx = m_contextList[i];
            if( checkedCtx.length() > 0 )
            {
                if( checkedCtx.charAt(0) == '!' )
                {
                    // Negated entry: include body for anything but this context.
                    if( !ctx.equalsIgnoreCase( checkedCtx.substring(1) ) )
                    {
                        return EVAL_BODY_INCLUDE;
                    }
                }
                else if( ctx.equalsIgnoreCase( checkedCtx ) )
                {
                    return EVAL_BODY_INCLUDE;
                }
            }
        }

        return SKIP_BODY;
    }
}
package io.druid.server.http;

import com.google.common.base.Function;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.inject.Inject;
import com.metamx.common.MapUtils;
import com.metamx.common.Pair;
import com.metamx.common.guava.Comparators;
import io.druid.client.DruidDataSource;
import io.druid.client.DruidServer;
import io.druid.client.InventoryView;
import io.druid.client.indexing.IndexingServiceClient;
import io.druid.db.DatabaseSegmentManager;
import io.druid.timeline.DataSegment;
import org.joda.time.DateTime;
import org.joda.time.Interval;

import javax.annotation.Nullable;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Response;

import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

/**
 * Coordinator HTTP resource exposing datasource and segment metadata, plus
 * enable/disable/kill operations, under /druid/coordinator/v1/datasources.
 */
@Path("/druid/coordinator/v1/datasources")
public class DatasourcesResource
{
  /** Live view of which servers currently serve which segments. */
  private final InventoryView serverInventoryView;
  /** Metadata store used to enable/disable datasources and segments. */
  private final DatabaseSegmentManager databaseSegmentManager;
  /** Client to the indexing service; null when none is deployed. */
  private final IndexingServiceClient indexingServiceClient;

  @Inject
  public DatasourcesResource(
      InventoryView serverInventoryView,
      DatabaseSegmentManager databaseSegmentManager,
      @Nullable IndexingServiceClient indexingServiceClient
  )
  {
    this.serverInventoryView = serverInventoryView;
    this.databaseSegmentManager = databaseSegmentManager;
    this.indexingServiceClient = indexingServiceClient;
  }

  /**
   * Lists datasources. ?full returns the full objects, ?simple returns
   * name+summary maps, and the default returns just the names.
   */
  @GET
  @Produces("application/json")
  public Response getQueryableDataSources(
      @QueryParam("full") String full,
      @QueryParam("simple") String simple
  )
  {
    Response.ResponseBuilder builder = Response.ok();
    if (full != null) {
      return builder.entity(getDataSources()).build();
    } else if (simple != null) {
      return builder.entity(
          Lists.newArrayList(
              Iterables.transform(
                  getDataSources(),
                  new Function<DruidDataSource, Map<String, Object>>()
                  {
                    @Override
                    public Map<String, Object> apply(DruidDataSource dataSource)
                    {
                      return makeSimpleDatasource(dataSource);
                    }
                  }
              )
          )
      ).build();
    }

    // Default: names only.
    return builder.entity(
        Lists.newArrayList(
            Iterables.transform(
                getDataSources(),
                new Function<DruidDataSource, String>()
                {
                  @Override
                  public String apply(DruidDataSource dataSource)
                  {
                    return dataSource.getName();
                  }
                }
            )
        )
    ).build();
  }

  /**
   * Returns one datasource: full object with ?full, otherwise a summary map.
   * NOTE(review): lookup lowercases the name but the simple branch passes the
   * original-case name into getSimpleDatasource — confirm this is intended.
   */
  @GET
  @Path("/{dataSourceName}")
  @Produces("application/json")
  public Response getTheDataSource(
      @PathParam("dataSourceName") final String dataSourceName,
      @QueryParam("full") final String full
  )
  {
    DruidDataSource dataSource = getDataSource(dataSourceName.toLowerCase());
    if (dataSource == null) {
      return Response.noContent().build();
    }

    if (full != null) {
      return Response.ok(dataSource).build();
    }

    return Response.ok(getSimpleDatasource(dataSourceName)).build();
  }

  /** Re-enables a datasource in the metadata store; 204 if it was not found. */
  @POST
  @Path("/{dataSourceName}")
  @Consumes("application/json")
  public Response enableDataSource(
      @PathParam("dataSourceName") final String dataSourceName
  )
  {
    if (!databaseSegmentManager.enableDatasource(dataSourceName)) {
      return Response.noContent().build();
    }

    return Response.ok().build();
  }

  /**
   * Disables a datasource; with ?kill=true and ?interval=..., instead asks the
   * indexing service to permanently delete segments in that interval.
   */
  @DELETE
  @Path("/{dataSourceName}")
  @Produces("application/json")
  public Response deleteDataSource(
      @PathParam("dataSourceName") final String dataSourceName,
      @QueryParam("kill") final String kill,
      @QueryParam("interval") final String interval
  )
  {
    if (indexingServiceClient == null) {
      return Response.ok(ImmutableMap.of("error", "no indexing service found")).build();
    }
    if (kill != null && Boolean.valueOf(kill)) {
      try {
        indexingServiceClient.killSegments(dataSourceName, new Interval(interval));
      }
      catch (Exception e) {
        // Swallows the cause deliberately; only a generic hint is returned.
        return Response.serverError().entity(
            ImmutableMap.of(
                "error",
                "Exception occurred. Are you sure you have an indexing service?"
            )
        )
                       .build();
      }
    } else {
      if (!databaseSegmentManager.removeDatasource(dataSourceName)) {
        return Response.noContent().build();
      }
    }

    return Response.ok().build();
  }

  /**
   * Lists the intervals covered by a datasource's segments, newest first.
   * ?full maps interval -> segmentId -> {metadata, servers}; ?simple maps
   * interval -> {size, count}; default is the bare interval set.
   */
  @GET
  @Path("/{dataSourceName}/intervals")
  @Produces("application/json")
  public Response getSegmentDataSourceIntervals(
      @PathParam("dataSourceName") String dataSourceName,
      @QueryParam("simple") String simple,
      @QueryParam("full") String full
  )
  {
    final DruidDataSource dataSource = getDataSource(dataSourceName.toLowerCase());

    if (dataSource == null) {
      return Response.noContent().build();
    }

    // Inverse start-then-end ordering puts the most recent interval first.
    final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd());

    if (full != null) {
      final Map<Interval, Map<String, Object>> retVal = Maps.newTreeMap(comparator);
      for (DataSegment dataSegment : dataSource.getSegments()) {
        Map<String, Object> segments = retVal.get(dataSegment.getInterval());
        if (segments == null) {
          segments = Maps.newHashMap();
          retVal.put(dataSegment.getInterval(), segments);
        }

        Pair<DataSegment, Set<String>> val = getSegment(dataSegment.getIdentifier());
        segments.put(dataSegment.getIdentifier(), ImmutableMap.of("metadata", val.lhs, "servers", val.rhs));
      }

      return Response.ok(retVal).build();
    }

    if (simple != null) {
      final Map<Interval, Map<String, Object>> retVal = Maps.newTreeMap(comparator);
      for (DataSegment dataSegment : dataSource.getSegments()) {
        Map<String, Object> properties = retVal.get(dataSegment.getInterval());
        if (properties == null) {
          properties = Maps.newHashMap();
          properties.put("size", dataSegment.getSize());
          properties.put("count", 1);

          retVal.put(dataSegment.getInterval(), properties);
        } else {
          properties.put("size", MapUtils.getLong(properties, "size", 0L) + dataSegment.getSize());
          properties.put("count", MapUtils.getInt(properties, "count", 0) + 1);
        }
      }

      return Response.ok(retVal).build();
    }

    final Set<Interval> intervals = Sets.newTreeSet(comparator);
    for (DataSegment dataSegment : dataSource.getSegments()) {
      intervals.add(dataSegment.getInterval());
    }

    return Response.ok(intervals).build();
  }

  /**
   * Same as getSegmentDataSourceIntervals but restricted to segments whose
   * interval is contained in the given interval ("_" stands for "/" in the
   * path). Default output is the matching segment ids.
   */
  @GET
  @Path("/{dataSourceName}/intervals/{interval}")
  @Produces("application/json")
  public Response getSegmentDataSourceSpecificInterval(
      @PathParam("dataSourceName") String dataSourceName,
      @PathParam("interval") String interval,
      @QueryParam("simple") String simple,
      @QueryParam("full") String full
  )
  {
    final DruidDataSource dataSource = getDataSource(dataSourceName.toLowerCase());
    final Interval theInterval = new Interval(interval.replace("_", "/"));

    if (dataSource == null) {
      return Response.noContent().build();
    }

    final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd());
    if (full != null) {
      final Map<Interval, Map<String, Object>> retVal = Maps.newTreeMap(comparator);
      for (DataSegment dataSegment : dataSource.getSegments()) {
        if (theInterval.contains(dataSegment.getInterval())) {
          Map<String, Object> segments = retVal.get(dataSegment.getInterval());
          if (segments == null) {
            segments = Maps.newHashMap();
            retVal.put(dataSegment.getInterval(), segments);
          }

          Pair<DataSegment, Set<String>> val = getSegment(dataSegment.getIdentifier());
          segments.put(dataSegment.getIdentifier(), ImmutableMap.of("metadata", val.lhs, "servers", val.rhs));
        }
      }

      return Response.ok(retVal).build();
    }

    if (simple != null) {
      // NOTE(review): this branch uses an unordered HashMap while the ?full
      // branch above and the /intervals endpoint use a TreeMap with the
      // inverse-interval comparator — the response ordering is inconsistent;
      // looks unintentional, confirm.
      final Map<Interval, Map<String, Object>> retVal = Maps.newHashMap();
      for (DataSegment dataSegment : dataSource.getSegments()) {
        if (theInterval.contains(dataSegment.getInterval())) {
          Map<String, Object> properties = retVal.get(dataSegment.getInterval());
          if (properties == null) {
            properties = Maps.newHashMap();
            properties.put("size", dataSegment.getSize());
            properties.put("count", 1);

            retVal.put(dataSegment.getInterval(), properties);
          } else {
            properties.put("size", MapUtils.getLong(properties, "size", 0L) + dataSegment.getSize());
            properties.put("count", MapUtils.getInt(properties, "count", 0) + 1);
          }
        }
      }

      return Response.ok(retVal).build();
    }

    final Set<String> retVal = Sets.newTreeSet(Comparators.inverse(String.CASE_INSENSITIVE_ORDER));
    for (DataSegment dataSegment : dataSource.getSegments()) {
      if (theInterval.contains(dataSegment.getInterval())) {
        retVal.add(dataSegment.getIdentifier());
      }
    }

    return Response.ok(retVal).build();
  }

  /** Lists a datasource's segments: full objects with ?full, else ids. */
  @GET
  @Path("/{dataSourceName}/segments")
  @Produces("application/json")
  public Response getSegmentDataSourceSegments(
      @PathParam("dataSourceName") String dataSourceName,
      @QueryParam("full") String full
  )
  {
    DruidDataSource dataSource = getDataSource(dataSourceName.toLowerCase());
    if (dataSource == null) {
      return Response.noContent().build();
    }

    Response.ResponseBuilder builder = Response.ok();
    if (full != null) {
      return builder.entity(dataSource.getSegments()).build();
    }

    return builder.entity(
        Iterables.transform(
            dataSource.getSegments(),
            new Function<DataSegment, Object>()
            {
              @Override
              public Object apply(DataSegment segment)
              {
                return segment.getIdentifier();
              }
            }
        )
    ).build();
  }

  /** Returns one segment's metadata plus the servers currently serving it. */
  @GET
  @Path("/{dataSourceName}/segments/{segmentId}")
  @Produces("application/json")
  public Response getSegmentDataSourceSegment(
      @PathParam("dataSourceName") String dataSourceName,
      @PathParam("segmentId") String segmentId
  )
  {
    DruidDataSource dataSource = getDataSource(dataSourceName.toLowerCase());
    if (dataSource == null) {
      return Response.noContent().build();
    }

    Pair<DataSegment, Set<String>> retVal = getSegment(segmentId);

    if (retVal != null) {
      return Response.ok(
          ImmutableMap.of("metadata", retVal.lhs, "servers", retVal.rhs)
      ).build();
    }

    return Response.noContent().build();
  }

  /** Disables a single segment in the metadata store; 204 if not found. */
  @DELETE
  @Path("/{dataSourceName}/segments/{segmentId}")
  public Response deleteDatasourceSegment(
      @PathParam("dataSourceName") String dataSourceName,
      @PathParam("segmentId") String segmentId
  )
  {
    if (!databaseSegmentManager.removeSegment(dataSourceName, segmentId)) {
      return Response.noContent().build();
    }

    return Response.ok().build();
  }

  /**
   * Re-enables a single segment; 204 if not found.
   * NOTE(review): dataSourceName is accepted but never used here — presumably
   * kept for URL symmetry; confirm.
   */
  @POST
  @Path("/{dataSourceName}/segments/{segmentId}")
  @Consumes("application/json")
  public Response enableDatasourceSegment(
      @PathParam("dataSourceName") String dataSourceName,
      @PathParam("segmentId") String segmentId
  )
  {
    if (!databaseSegmentManager.enableSegment(segmentId)) {
      return Response.noContent().build();
    }

    return Response.ok().build();
  }

  /** Lists the tiers of all servers currently serving this datasource. */
  @GET
  @Path("/{dataSourceName}/tiers")
  @Produces("application/json")
  public Response getSegmentDataSourceTiers(
      @PathParam("dataSourceName") String dataSourceName
  )
  {
    Set<String> retVal = Sets.newHashSet();
    for (DruidServer druidServer : serverInventoryView.getInventory()) {
      if (druidServer.getDataSource(dataSourceName) != null) {
        retVal.add(druidServer.getTier());
      }
    }

    return Response.ok(retVal).build();
  }

  // Merges the per-server views of one datasource into a single object
  // containing the union of its segments, or null if no server has it.
  private DruidDataSource getDataSource(final String dataSourceName)
  {
    Iterable<DruidDataSource> dataSources =
        Iterables.concat(
            Iterables.transform(
                serverInventoryView.getInventory(),
                new Function<DruidServer, DruidDataSource>()
                {
                  @Override
                  public DruidDataSource apply(DruidServer input)
                  {
                    return input.getDataSource(dataSourceName);
                  }
                }
            )
        );

    List<DruidDataSource> validDataSources = Lists.newArrayList();
    for (DruidDataSource dataSource : dataSources) {
      if (dataSource != null) {
        validDataSources.add(dataSource);
      }
    }
    if (validDataSources.isEmpty()) {
      return null;
    }

    // Keyed by identifier, so the same segment reported by several servers
    // appears once.
    Map<String, DataSegment> segmentMap = Maps.newHashMap();
    for (DruidDataSource dataSource : validDataSources) {
      if (dataSource != null) {
        Iterable<DataSegment> segments = dataSource.getSegments();
        for (DataSegment segment : segments) {
          segmentMap.put(segment.getIdentifier(), segment);
        }
      }
    }

    return new DruidDataSource(
        dataSourceName,
        ImmutableMap.<String, String>of()
    ).addSegments(segmentMap);
  }

  // All datasources across the inventory, de-duplicated and sorted by name.
  private Set<DruidDataSource> getDataSources()
  {
    TreeSet<DruidDataSource> dataSources = Sets.newTreeSet(
        new Comparator<DruidDataSource>()
        {
          @Override
          public int compare(DruidDataSource druidDataSource, DruidDataSource druidDataSource1)
          {
            return druidDataSource.getName().compareTo(druidDataSource1.getName());
          }
        }
    );
    dataSources.addAll(
        Lists.newArrayList(
            Iterables.concat(
                Iterables.transform(
                    serverInventoryView.getInventory(),
                    new Function<DruidServer, Iterable<DruidDataSource>>()
                    {
                      @Override
                      public Iterable<DruidDataSource> apply(DruidServer input)
                      {
                        return input.getDataSources();
                      }
                    }
                )
            )
        )
    );
    return dataSources;
  }

  // Finds a segment by id across all servers; returns the segment plus the
  // hosts serving it, or null when no server has it.
  private Pair<DataSegment, Set<String>> getSegment(String segmentId)
  {
    DataSegment theSegment = null;
    Set<String> servers = Sets.newHashSet();
    for (DruidServer druidServer : serverInventoryView.getInventory()) {
      DataSegment currSegment = druidServer.getSegments().get(segmentId);
      if (currSegment != null) {
        theSegment = currSegment;
        servers.add(druidServer.getHost());
      }
    }

    if (theSegment == null) {
      return null;
    }

    return new Pair<>(theSegment, servers);
  }

  // {name, properties} wrapper used by the ?simple listing.
  private Map<String, Object> makeSimpleDatasource(DruidDataSource input)
  {
    return new ImmutableMap.Builder<String, Object>()
        .put("name", input.getName())
        .put("properties", getSimpleDatasource(input.getName()))
        .build();
  }

  // Aggregates per-tier and total segment statistics for one datasource.
  // NOTE(review): if no server reports any segment, minTime/maxTime keep the
  // Long.MAX_VALUE/MIN_VALUE sentinels and are handed to new DateTime(...) —
  // confirm this endpoint is unreachable in that state.
  private Map<String, Map<String, Object>> getSimpleDatasource(String dataSourceName)
  {
    Map<String, Object> tiers = Maps.newHashMap();
    Map<String, Object> segments = Maps.newHashMap();
    Map<String, Map<String, Object>> retVal = ImmutableMap.of(
        "tiers", tiers,
        "segments", segments
    );
    int totalSegmentCount = 0;
    long totalSegmentSize = 0;
    long minTime = Long.MAX_VALUE;
    long maxTime = Long.MIN_VALUE;
    for (DruidServer druidServer : serverInventoryView.getInventory()) {
      DruidDataSource druidDataSource = druidServer.getDataSource(dataSourceName);
      if (druidDataSource == null) {
        continue;
      }

      long dataSourceSegmentSize = 0;
      for (DataSegment dataSegment : druidDataSource.getSegments()) {
        dataSourceSegmentSize += dataSegment.getSize();
        if (dataSegment.getInterval().getStartMillis() < minTime) {
          minTime = dataSegment.getInterval().getStartMillis();
        }
        if (dataSegment.getInterval().getEndMillis() > maxTime) {
          maxTime = dataSegment.getInterval().getEndMillis();
        }
      }

      // segment stats
      totalSegmentCount += druidDataSource.getSegments().size();
      totalSegmentSize += dataSourceSegmentSize;

      // tier stats
      // NOTE(review): raw (Map) cast — unchecked; tiers only ever holds
      // Map<String, Object> values, so it is safe in practice.
      Map<String, Object> tierStats = (Map) tiers.get(druidServer.getTier());
      if (tierStats == null) {
        tierStats = Maps.newHashMap();
        tiers.put(druidServer.getTier(), tierStats);
      }
      int segmentCount = MapUtils.getInt(tierStats, "segmentCount", 0);
      tierStats.put("segmentCount", segmentCount + druidDataSource.getSegments().size());
      long segmentSize = MapUtils.getLong(tierStats, "size", 0L);
      tierStats.put("size", segmentSize + dataSourceSegmentSize);
    }

    segments.put("count", totalSegmentCount);
    segments.put("size", totalSegmentSize);
    segments.put("minTime", new DateTime(minTime));
    segments.put("maxTime", new DateTime(maxTime));
    return retVal;
  }
}
package org.fundacionjala.automation.scenario.steps.tablet.settings; import org.fundacionjala.automation.framework.maps.tablet.settings.ConnectionMap; import org.fundacionjala.automation.framework.maps.tablet.settings.NavigationMap; import org.fundacionjala.automation.framework.pages.tablet.settings.NavigationPage; import org.fundacionjala.automation.framework.utils.common.ExplicitWait; import org.junit.Assert; import cucumber.api.java.en.Then; public class SettingsThenSteps { @Then("^A message that describes that the connection has been successful should be displayed$") public void a_message_that_describes_that_the_connection_has_been_successful_should_be_displayed() throws Throwable { boolean verification = false; if(ExplicitWait.waitForElement(ConnectionMap.MESSAGE_STATUS, 20)){ verification = true; } Assert.assertTrue(verification); } @Then("^The list of Conference Rooms should have the corresponding rooms$") public void the_list_of_Conference_Rooms_should_have_the_corresponding_rooms() throws Throwable { NavigationPage navigation = new NavigationPage(); Assert.assertTrue(navigation.clickOnRoomToggleButton() .verifyIfRoomsExist()); } @Then("^A message that describes that the connection with the room has been successful should be displayed$") public void a_message_that_describes_that_the_connection_with_the_room_has_been_successful_should_be_displayed() throws Throwable { boolean verification = false; if(ExplicitWait.waitForElement(NavigationMap.MESSAGE_STATUS, 20)){ verification = true; } Assert.assertTrue(verification); } @Then("^The Connection page should be displayed with the elements corresponding$") public void the_Connection_page_should_be_displayed_with_the_elements_corresponding() throws Throwable { boolean verification = false; if(ExplicitWait.waitForElement(ConnectionMap.SERVICE_URL_TEXT_FIELD, 20)){ verification = true; } Assert.assertTrue(verification); } @Then("^The Navigation page should be displayed with the elements corresponding$") public void 
the_Navigation_page_should_be_displayed_with_the_elements_corresponding() throws Throwable { boolean verification = false; if((ExplicitWait.waitForElement(NavigationMap.DEFAULT_ROOM_TOGGLE_BUTTON, 20))&& (ExplicitWait.waitForElement(NavigationMap.INACTIVITY_TIMEOUT_FIELD, 20))){ verification = true; } Assert.assertTrue(verification); } @Then("^The results displayed should be according the filter inserted \"([^\"]*)\"$") public void the_results_displayed_should_be_according_the_filter_inserted(String filter) throws Throwable { NavigationPage navigation = new NavigationPage(); navigation.verifyIfRoomsExistAccordingFilter(filter); } }
package org.usfirst.frc.team4828;

import org.junit.Assert;
import org.junit.Test;

/**
 * Smoke tests verifying that the JUnit harness runs at all.
 */
public class RobotTest {

    /** Sanity check: a trivially true assertion proves the runner works. */
    @Test
    public void testTheThings() {
        System.out.println("Test ran!");
        Assert.assertTrue(true);
    }

    /** Checks that the two local values are actually equal. */
    @Test
    public void anotherTest() {
        int a = 1;
        int b = 1;
        System.out.println("Checking that " + a + " is equal to " + b);
        // BUG FIX: the original asserted the literals (1, 1), ignoring a and b
        // entirely — the printed claim was never actually tested.
        Assert.assertEquals(a, b);
    }
}
package com.enigmabridge.comm; import com.enigmabridge.EBEndpointInfo; import com.enigmabridge.EBEngine; import com.enigmabridge.EBSettings; import com.enigmabridge.UserObjectInfo; import com.enigmabridge.create.EBCreateUtils; public class EBAPICall { protected EBConnectionSettings settings; protected EBEndpointInfo endpoint; protected String apiKey; protected String apiVersion = "1.0"; protected String callFunction; protected UserObjectInfo uo; protected byte[] nonce; protected String apiBlock; protected EBRawRequest rawRequest; protected EBRawResponse rawResponse; protected EBConnector connector; protected EBResponseParser responseParser; protected EBEngine engine; public static abstract class AbstractBuilder<T extends EBAPICall, B extends AbstractBuilder> { public B setEndpoint(EBEndpointInfo a) { getObj().setEndpoint(a); return getThisBuilder(); } public B setSettings(EBConnectionSettings b) { getObj().setSettings(b); return getThisBuilder(); } public B setApiKey(String apiKey){ getObj().setApiKey(apiKey); return getThisBuilder(); } public B setApiVersion(String apiVersion){ getObj().setApiVersion(apiVersion); return getThisBuilder(); } public B setUo(UserObjectInfo uo){ getObj().setUo(uo); return getThisBuilder(); } public B setCallFunction(String callFunction){ getObj().setCallFunction(callFunction); return getThisBuilder(); } public B setEngine(EBEngine engine){ getObj().setEngine(engine); final EBSettings settings = engine == null ? 
null : engine.getDefaultSettings(); if (settings != null){ if (settings.getApiKey() != null && getObj().getApiKey() == null){ getObj().setApiKey(settings.getApiKey()); } if (settings.getEndpointInfo() != null && getObj().getEndpoint() == null){ getObj().setEndpoint(settings.getEndpointInfo()); } if(settings.getConnectionSettings() != null && getObj().getSettings() == null){ getObj().setSettings(settings.getConnectionSettings()); } } return getThisBuilder(); } public B setNonce(byte[] nonce){ getObj().setNonce(nonce); return getThisBuilder(); } public abstract T build(); public abstract B getThisBuilder(); public abstract T getObj(); } public static class Builder extends AbstractBuilder<EBAPICall, Builder> { private final EBAPICall parent = new EBAPICall(); @Override public Builder getThisBuilder() { return this; } @Override public EBAPICall getObj() { return parent; } @Override public EBAPICall build() { return parent; } } /** * Returns response parser when is needed. May lazily initialize parser. * Override point. * * @return Response parser to use */ public EBResponseParser getResponseParser(){ return new EBResponseParserBase(); } /** * Returns respone object to be used by the response parser. * Enables to specify a subclass of the original response class. */ public EBResponse getResponseObject(){ return new EBResponse(); } /** * Builds API key token. * Consists of apiKey and low4B identifier. * Result is returned and set to the property. */ public String buildApiBlock(){ return buildApiBlock(null, null, null); } /** * Builds API key token. * Consists of apiKey and low4B identifier. * Can be specified by parameters or currently set values are set. * Result is returned and set to the property. * * @param apiKey API key string * @param uoId integer or hex-coded string. * @param uoType user object type. Integer or hex-coded string. */ public String buildApiBlock(String apiKey, Long uoId, Long uoType){ String apiKeyToUser = apiKey == null ? 
this.getApiKey() : apiKey; long uoIdr = uoId == null ? this.getUo().getUoid() : uoId; long type = uoType == null ? this.getUo().getUserObjectType().getValue() : uoType; this.apiBlock = EBCreateUtils.getUoHandle(apiKeyToUser, uoIdr, type); return this.apiBlock; } public String getRequestMethod(){ if (settings == null && rawRequest == null){ return null; } if (settings != null && settings.getMethod() != null){ return settings.getMethod(); } if (rawRequest != null){ return rawRequest.getMethod(); } return null; } /** * Returns true if HTTP POST method should be used for this service call. * @return true if HTTP method is GET */ public boolean isMethodGet(){ final String method = getRequestMethod(); return (method == null && EBCommUtils.METHOD_DEFAULT.equals(EBCommUtils.METHOD_GET)) || "POST".equalsIgnoreCase(method); } /** * Returns true if HTTP POST method should be used for this service call. * @return true if HTTP method is POST */ public boolean isMethodPost(){ final String method = getRequestMethod(); return (method == null && EBCommUtils.METHOD_DEFAULT.equals(EBCommUtils.METHOD_POST)) || "POST".equalsIgnoreCase(method); } public byte[] getNonce() { if (nonce == null){ nonce = EBCommUtils.genProcessDataNonce(); } return nonce; } protected void setNonce(byte[] nonce) { this.nonce = nonce; } public EBConnectionSettings getSettings() { return settings; } protected EBAPICall setSettings(EBConnectionSettings settings) { this.settings = settings; // TODO: should clone own copy? 
return this; } public EBEndpointInfo getEndpoint() { return endpoint; } protected EBAPICall setEndpoint(EBEndpointInfo endpoint) { this.endpoint = endpoint; return this; } public String getApiKey() { return apiKey; } protected void setApiKey(String apiKey) { this.apiKey = apiKey; } public String getApiVersion() { return apiVersion; } protected void setApiVersion(String apiVersion) { this.apiVersion = apiVersion; } public UserObjectInfo getUo() { return uo; } protected void setUo(UserObjectInfo uo) { this.uo = uo; } public String getCallFunction() { return callFunction; } protected void setCallFunction(String callFunction) { this.callFunction = callFunction; } public EBEngine getEngine() { return engine; } protected void setEngine(EBEngine engine) { this.engine = engine; } }
package water.api.schemas3; import water.AutoBuffer; import water.H2O; import water.Iced; import water.IcedWrapper; import water.api.API; import water.util.TwoDimTable; /** * Client-facing Schema of a TwoDimTable * Notes: * 1) We embed the rowHeaders into the table, extending it by 1 column * 2) We store all the data in column-major order * 3) We store all the data in String format * */ public class TwoDimTableV3 extends SchemaV3<TwoDimTable, TwoDimTableV3> { public static class ColumnSpecsBase extends SchemaV3<Iced, ColumnSpecsBase> { @API(help="Column Name", direction=API.Direction.OUTPUT) String name; @API(help="Column Type", direction=API.Direction.OUTPUT) String type; @API(help="Column Format (printf)", direction=API.Direction.OUTPUT) String format; @API(help="Column Description", direction=API.Direction.OUTPUT) String description; } @API(help="Table Name", direction=API.Direction.OUTPUT) public String name; @API(help="Table Description", direction=API.Direction.OUTPUT) public String description; @API(help="Column Specification", direction=API.Direction.OUTPUT) public ColumnSpecsBase[] columns; @API(help="Number of Rows", direction=API.Direction.OUTPUT) public int rowcount; @API(help="Table Data (col-major)", direction=API.Direction.OUTPUT) public IcedWrapper[][] data; public TwoDimTableV3() {} public TwoDimTableV3(TwoDimTable impl) { super(impl); } /** * Fill a TwoDimTable Schema from a TwoDimTable * @param t TwoDimTable * @return TwoDimTableSchema */ @Override public TwoDimTableV3 fillFromImpl(TwoDimTable t) { name = t.getTableHeader(); description = t.getTableDescription(); final int rows = t.getRowDim(); rowcount = rows; boolean have_row_header_cols = t.getColHeaderForRowHeaders() != null; for (int r=0; r<rows; ++r) { if (!have_row_header_cols) break; have_row_header_cols &= t.getRowHeaders()[r] != null; } if (have_row_header_cols) { final int cols = t.getColDim()+1; columns = new ColumnSpecsBase[cols]; columns[0] = new ColumnSpecsBase(); columns[0].name = 
pythonify(t.getColHeaderForRowHeaders()); columns[0].type = "string"; columns[0].format = "%s"; columns[0].description = t.getColHeaderForRowHeaders(); for (int c = 1; c < cols; ++c) { columns[c] = new ColumnSpecsBase(); columns[c].name = pythonify(t.getColHeaders()[c - 1]); columns[c].type = t.getColTypes()[c - 1]; columns[c].format = t.getColFormats()[c - 1]; columns[c].description = t.getColHeaders()[c - 1]; } data = new IcedWrapper[cols][rows]; data[0] = new IcedWrapper[t.getRowDim()]; for (int r = 0; r < t.getRowDim(); ++r) { data[0][r] = new IcedWrapper(t.getRowHeaders()[r]); } IcedWrapper[][] cellValues = t.getCellValues(); for (int c = 1; c < cols; ++c) { data[c] = new IcedWrapper[rows]; for (int r = 0; r < rows; ++r) { data[c][r] = cellValues[r][c - 1]; } } } else { final int cols = t.getColDim(); columns = new ColumnSpecsBase[cols]; for (int c = 0; c < cols; ++c) { columns[c] = new ColumnSpecsBase(); columns[c].name = pythonify(t.getColHeaders()[c]); columns[c].type = t.getColTypes()[c]; columns[c].format = t.getColFormats()[c]; columns[c].description = t.getColHeaders()[c]; } data = new IcedWrapper[cols][rows]; IcedWrapper[][] cellValues = t.getCellValues(); for (int c = 0; c < cols; ++c) { data[c] = new IcedWrapper[rows]; for (int r = 0; r < rows; ++r) { data[c][r] = cellValues[r][c]; } } } return this; } /** * Turn a description such as "Avg. 
Training MSE" into a JSON-usable field name "avg_training_mse" * @param n * @return */ private String pythonify(String n) { if (n == null || name.toLowerCase().contains("confusion")) return n; StringBuilder sb = new StringBuilder(); String [] modified = n.split("[\\s_]+"); for (int i=0; i<modified.length; ++i) { if (i!=0) sb.append("_"); String s = modified[i]; // if (!s.matches("^[A-Z]{2,3}$")) { sb.append(s.toLowerCase()); //everything goes lowercase // } else { // sb.append(s); } String newString = sb.toString().replaceAll("[^\\w]", ""); // if (!newString.equals(name)) { // Log.warn("Turning column description into field name: " + name + " --> " + newString); return newString; } /** * Fill a TwoDimTable from this Schema * @param impl * @return */ public TwoDimTable fillImpl(TwoDimTable impl) { final int rows = data[0].length; assert(rows == rowcount); final int cols = data.length+1; String tableHeader = name; String tableDescription = description; String colHeaderForRowHeaders = columns[0].name; String[] rowHeaders = new String[rows]; for (int r=0; r<rows; ++r) { rowHeaders[r] = (String)data[0][r].get(); } String[] colHeaders = new String[cols]; colHeaders[0] = ""; for (int c=1; c<cols; ++c) { colHeaders[c] = columns[c].description; } String[] colTypes = new String[cols]; colTypes[0] = ""; for (int c=1; c<cols; ++c) { colTypes[c] = columns[c].type; } String[] colFormats = new String[cols]; colFormats[0] = "%s"; for (int c=1; c<cols; ++c) { colFormats[c] = columns[c].format; } String[][] strCellValues = new String[rows][cols]; double[][] dblCellValues = new double[rows][cols]; for (int r=0; r<data[0].length; ++r) { for (int c=0; c<data.length; ++c) { try { if (columns[c].format.equals("string")) { // switch(String) is not java1.6 compliant! 
strCellValues[r][c] = (String)data[c][r].get(); } else if (columns[c].format.equals("double")) { dblCellValues[r][c] = (Double)data[c][r].get(); } else if (columns[c].format.equals("float")) { dblCellValues[r][c] = (Float)data[c][r].get(); } else if (columns[c].format.equals("int")) { dblCellValues[r][c] = (Integer)data[c][r].get(); } else if (columns[c].format.equals("long")) { dblCellValues[r][c] = (Long)data[c][r].get(); } else throw H2O.fail(); } catch (ClassCastException e) { throw new RuntimeException(e); } } } return new TwoDimTable(tableHeader, tableDescription, rowHeaders, colHeaders, colTypes, colFormats, colHeaderForRowHeaders, strCellValues, dblCellValues); } public final AutoBuffer writeJSON_impl(AutoBuffer ab) { ab.putJSONStr("name",name); ab.put1(','); ab.putJSONStr("description",description); ab.put1(','); ab.putJSONStr("columns").put1(':'); ab.put1('['); if( columns!=null ) { for (int i = 0; i < columns.length; ++i) { columns[i].writeJSON(ab); if (i < columns.length - 1) ab.put1(','); } } ab.put1(']'); ab.put1(','); ab.putJSON4("rowcount", rowcount); ab.put1(','); ab.putJSONStr("data").put1(':'); ab.put1('['); if( data!=null ) { for (int i = 0; i < data.length; ++i) { ab.put1('['); for (int j = 0; j < data[i].length; ++j) { if (data[i][j] == null || data[i][j].get() == null) { ab.putJNULL(); } else { data[i][j].writeUnwrappedJSON(ab); } if (j < data[i].length - 1) ab.put1(','); } ab.put1(']'); if (i < data.length - 1) ab.put1(','); } } ab.put1(']'); return ab; } }
package io.djigger.client.jstack;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.djigger.monitoring.java.model.GlobalThreadId;
import io.djigger.monitoring.java.model.StackTraceElement;
import io.djigger.monitoring.java.model.ThreadInfo;

/**
 * Line-oriented parser for textual thread dumps (jstack / WLS output).
 * Feed lines via {@link #consumeLine(String)}; each fully parsed thread is
 * delivered to the registered {@link ParserEventListener}.
 * NOTE: this parser is stateful and not thread-safe.
 */
public class Parser {

    private static final Logger logger = LoggerFactory.getLogger(Parser.class);

    private final Format format;
    // SimpleDateFormat is not thread-safe; fine here as an instance field.
    private final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    Matcher threadDumpStartMatcher;
    Matcher threadDumpEndMatcher;
    Matcher startMatcher;
    Matcher stateMatcher;
    Matcher matcher;
    Matcher separatorMatcher;

    // Parser state carried across consumeLine() calls.
    List<StackTraceElement> stackTrace = null;
    String previousLine = null;
    Date date = null;
    boolean inThread = false;
    boolean threadStateFound = false;

    ParserEventListener listener;

    /**
     * @param format dump dialect to parse (see {@link Format})
     * @param listener callback receiving each parsed thread
     */
    public Parser(Format format, ParserEventListener listener) {
        super();
        this.format = format;
        this.listener = listener;

        // Pre-compile one reusable Matcher per pattern; reset() per line.
        threadDumpStartMatcher = format.getThreadDumpStartPattern().matcher("");
        threadDumpEndMatcher = format.getThreadDumpEndPattern().matcher("");
        startMatcher = format.getStartPattern().matcher("");
        stateMatcher = format.getStatePattern().matcher("");
        matcher = format.getMethodPattern().matcher("");
        separatorMatcher = format.getSeparatorPattern().matcher("");
    }

    /**
     * Consumes one line of the dump, updating parser state and emitting a
     * thread to the listener whenever a complete stack has been read.
     */
    public void consumeLine(String line) throws IOException {
        threadDumpStartMatcher.reset(line);
        threadDumpEndMatcher.reset(line);
        matcher.reset(line);
        separatorMatcher.reset(line);

        // (The original probed threadDumpEndMatcher.find() with an empty body;
        // the result was unused, so the dead check has been removed.)

        if (threadDumpStartMatcher.find()) {
            // The dump timestamp, if any, is on the line *before* the dump header.
            if (previousLine != null) {
                try {
                    date = dateFormat.parse(previousLine);
                } catch (ParseException e) {
                    date = null; // previous line was not a timestamp
                }
            }
        }

        if (!inThread) {
            startMatcher.reset(line);
            if (startMatcher.find()) {
                inThread = true;
                stackTrace = new LinkedList<StackTraceElement>();
            }
        } else {
            if (matcher.find()) {
                // One frame: group(1)=class, group(2)=method. Line/file unavailable.
                stackTrace.add(new StackTraceElement(matcher.group(1), matcher.group(2), "", 0));
            } else if (separatorMatcher.find()) {
                if (stackTrace.size() > 0) {
                    listener.onThreadParsed(toThread(startMatcher, threadStateFound, stateMatcher, stackTrace, date));
                }
                inThread = false;
                threadStateFound = false;
                // A separator can itself be the start of the next thread (WLS).
                startMatcher.reset(line);
                if (startMatcher.find()) {
                    inThread = true;
                    stackTrace.clear();
                }
            }
        }

        if (inThread && !threadStateFound) {
            stateMatcher.reset(line);
            threadStateFound = stateMatcher.find();
        }
        previousLine = line;
    }

    /** Callback invoked once per fully parsed thread. */
    public interface ParserEventListener {
        public void onThreadParsed(ThreadInfo thread);
    }

    /**
     * Builds a {@link ThreadInfo} from the matchers of the just-finished thread.
     * Bug fix: guards against a null timestamp — the dump may carry no
     * parsable date line, in which case {@code timestamp.getTime()} used to NPE.
     */
    public static ThreadInfo toThread(Matcher startMatcher, boolean stateFound, Matcher stateMatcher,
            List<StackTraceElement> stackTrace, Date timestamp) {
        Long id;
        try {
            id = Long.decode(startMatcher.group(2));
        } catch (NumberFormatException e) {
            // tid is not numeric (e.g. "0x..." decode failure) — fall back to a stable hash.
            id = (long) startMatcher.group(2).hashCode();
        }
        String name = startMatcher.group(1);

        Thread.State state = Thread.State.RUNNABLE;
        if (stateFound && stateMatcher.groupCount() > 0) {
            String stateStr = stateMatcher.group(1);
            try {
                state = Thread.State.valueOf(stateStr);
            } catch (Exception e) {
                // Unknown state token — keep the RUNNABLE default (best effort).
            }
        }

        ThreadInfo thread = new ThreadInfo(
                (StackTraceElement[]) stackTrace.toArray(new StackTraceElement[stackTrace.size()]));
        thread.setGlobalId(new GlobalThreadId(null, id));
        thread.setName(name);
        thread.setState(state);
        if (timestamp != null) {
            thread.setTimestamp(timestamp.getTime());
        }
        return thread;
    }

    /**
     * Detects the dump dialect by scanning the file for known headers.
     * Bug fix: the original closed the reader in a finally block without a
     * null check, so a failed FileReader construction raised an NPE that
     * masked the original IOException; try-with-resources handles both.
     * @return {@link Format#STANDARD_OUTPUT} if a jstack header is found,
     *         {@link Format#WLS} otherwise
     */
    public static Format detectFormat(File file) {
        String line;
        try (BufferedReader reader = new BufferedReader(new FileReader(file))) {
            while ((line = reader.readLine()) != null) {
                if (line.startsWith("Full thread dump")) {
                    return Format.STANDARD_OUTPUT;
                }
                /*if(line.startsWith("Thread t@")) {
                    return Format.JSTACK;
                } */
            }
        } catch (IOException e) {
            logger.error("IO error while detecting format of file " + file, e);
        }
        return Format.WLS;
    }

    /**
     * Supported dump dialects, each defined by six regexes: dump start/end,
     * thread start, thread state, stack frame, and thread separator.
     */
    public enum Format {
        STANDARD_OUTPUT(
                Pattern.compile("Full thread dump"),
                Pattern.compile("JNI global ref"),
                Pattern.compile("\"(.+?)\".+prio=[0-9]+.* tid=(.+?) nid=.+"),
                Pattern.compile("java\\.lang\\.Thread\\.State: (.+?) "),
                Pattern.compile("at (.*)\\.(.+?)\\(.*\\)"),
                Pattern.compile("^[ \t]*$")),
        /*
        JSTACK(
                Pattern.compile("Deadlock Detection"),
                Pattern.compile("(Thread) t@([0-9]+)"),
                Pattern.compile("\\(state = (.+?)\\)"),
                Pattern.compile(" - (.*)\\.(.+?)\\(.*\\)"), //(((.*):([0-9]*))*
                Pattern.compile("^[ \t]*$")),
        */
        WLS(
                Pattern.compile("Current thread stacks for server"),
                Pattern.compile("Current thread stacks for server"),
                Pattern.compile("\"((.+?))\""),
                Pattern.compile("\".+?\".* ([A-Z_]+)$"),
                Pattern.compile("^[\t ]*(.*)\\.(.+?)\\(.+\\)$"),
                Pattern.compile("\".+?\""));

        private final Pattern threadDumpStartPattern;
        private final Pattern threadDumpEndPattern;
        private final Pattern startPattern;
        private final Pattern statePattern;
        private final Pattern methodPattern;
        private final Pattern separatorPattern;

        private Format(Pattern threadDumpStartPattern, Pattern threadDumpEndPattern, Pattern startPattern,
                Pattern statePattern, Pattern methodPattern, Pattern separatorPattern) {
            this.threadDumpStartPattern = threadDumpStartPattern;
            this.threadDumpEndPattern = threadDumpEndPattern;
            this.startPattern = startPattern;
            this.statePattern = statePattern;
            this.methodPattern = methodPattern;
            this.separatorPattern = separatorPattern;
        }

        public Pattern getStartPattern() {
            return startPattern;
        }

        public Pattern getMethodPattern() {
            return methodPattern;
        }

        public Pattern getSeparatorPattern() {
            return separatorPattern;
        }

        public Pattern getStatePattern() {
            return statePattern;
        }

        public Pattern getThreadDumpStartPattern() {
            return threadDumpStartPattern;
        }

        public Pattern getThreadDumpEndPattern() {
            return threadDumpEndPattern;
        }
    }
}
package fi.nls.oskari.domain.map.view;

import fi.nls.oskari.util.JSONHelper;
import fi.nls.oskari.util.PropertyUtil;
import org.apache.commons.lang.text.StrSubstitutor;
import org.json.JSONObject;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Domain object for an Oskari map view: identity, metadata and the ordered
 * list of {@link Bundle}s it consists of. Bundle sequence numbers are kept
 * as a 1-based, gap-free ordering.
 */
public class View implements Serializable {
    private long id = -1;
    private long oldId = -1;
    private String name = null;
    private String description = null;
    private String uuid = null;
    private boolean onlyForUuId = false;
    private JSONObject metadata = null;
    private List<Bundle> bundles = new ArrayList<Bundle>();

    /**
     * Builds the public URL for this view by substituting ${lang} and ${uuid}
     * into the configured (or default) base URL.
     */
    public String getUrl() {
        final Map<String, String> valuesMap = new HashMap<String, String>();
        valuesMap.put("lang", getLang());
        valuesMap.put("uuid", getUuid());
        final StrSubstitutor sub = new StrSubstitutor(valuesMap);
        String baseUrl = getBaseUrlForView(getType().toLowerCase(), getLang());
        return sub.replace(baseUrl);
    }

    /** Returns the metadata object, lazily creating an empty one. */
    public JSONObject getMetadata() {
        if (metadata == null) {
            metadata = new JSONObject();
        }
        return metadata;
    }

    public String getMetadataAsString() {
        return getMetadata().toString();
    }

    public void setMetadata(JSONObject metadata) {
        this.metadata = metadata;
    }

    /**
     * Resolves the base URL for a view type: a single "view.<type>.url"
     * property, a locale-specific "view.<type>.url.<lang>" property, or a
     * default built from oskari.domain + oskari.map.url.
     */
    private String getBaseUrlForView(final String type, final String lang) {
        String value = null;
        final String basePropKey = "view." + type + ".url";
        List<String> urls = PropertyUtil.getPropertyNamesStartingWith(basePropKey);
        if (urls.size() == 1) {
            // normal override of defaults
            value = PropertyUtil.get(basePropKey);
        } else if (urls.size() > 1) {
            // locale-specific urls
            value = PropertyUtil.getOptional(basePropKey + "." + lang);
        }
        if (value == null) {
            // not defined, use reasonable default
            // oskari.map.url=/oskari-map
            value = PropertyUtil.get("oskari.domain") + PropertyUtil.get("oskari.map.url");
            // uuid param name should match ActionConstants.PARAM_UUID
            value = value + "?lang=${lang}&uuid=${uuid}";
        }
        return value;
    }

    public long getId() {
        return this.id;
    }

    public void setId(long id) {
        this.id = id;
    }

    public long getOldId() {
        return this.oldId;
    }

    public void setOldId(long oldId) {
        this.oldId = oldId;
    }

    public String getUuid() {
        return this.uuid;
    }

    public void setUuid(String uuid) {
        this.uuid = uuid;
    }

    public boolean isOnlyForUuId() {
        return this.onlyForUuId;
    }

    public void setOnlyForUuId(boolean onlyForUuId) {
        this.onlyForUuId = onlyForUuId;
    }

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    /** Never returns null; empty string when no description is set. */
    public String getDescription() {
        if (this.description == null) return "";
        return this.description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    private String application = "full-map"; // app name
    private String page = "view"; // JSP
    private String developmentPath = "/applications";
    private long creator = -1;
    private boolean isPublic = false;
    private boolean isDefault = false;
    private String type = null;
    private String pubDomain = "";
    private String lang = PropertyUtil.getDefaultLanguage();

    public String getApplication() {
        return this.application;
    }

    public void setApplication(String as) {
        this.application = as;
    }

    public String getPage() {
        return this.page;
    }

    public void setPage(String ba) {
        this.page = ba;
    }

    public long getCreator() {
        return this.creator;
    }

    public void setCreator(long creator) {
        this.creator = creator;
    }

    public boolean isPublic() {
        return this.isPublic;
    }

    public void setIsPublic(boolean isPublic) {
        this.isPublic = isPublic;
    }

    public boolean isDefault() {
        return this.isDefault;
    }

    public void setIsDefault(boolean isDefault) {
        this.isDefault = isDefault;
    }

    public String getPubDomain() {
        return this.pubDomain;
    }

    public void setPubDomain(String pd) {
        this.pubDomain = pd;
    }

    public String getLang() {
        return this.lang;
    }

    public void setLang(String lang) {
        this.lang = lang;
    }

    public String getDevelopmentPath() {
        return developmentPath;
    }

    public void setDevelopmentPath(String developmentPath) {
        this.developmentPath = developmentPath;
    }

    public String getType() {
        return this.type;
    }

    public void setType(String type) {
        this.type = type;
    }

    /** JSON-ish debug representation; newlines are stripped from fields. */
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("[");
        String name = this.name == null ? null
                : "'" + this.name.replace("\n", "").replace("\r", "") + "'";
        String description = this.description == null ? null
                : "'" + this.description.replace("\n", "").replace("\r", "") + "'";
        String uuid = this.uuid == null ? null
                : "'" + this.uuid.replace("\n", "").replace("\r", "") + "'";
        String lang = this.lang == null ? null
                : "'" + this.lang.replace("\n", "").replace("\r", "") + "'";
        String pubDomain = this.pubDomain == null ? null
                : "'" + this.pubDomain.replace("\n", "").replace("\r", "") + "'";
        sb.append("]");
        String ret = "{\n" + " id: " + this.id + ",\n" + " oldId: " + this.oldId + ",\n"
                + " name: " + name + ",\n" + " description: " + description + ",\n"
                + " uuid: " + uuid + ",\n" + " lang: " + lang + ",\n"
                + " pubDomain: " + pubDomain + ",\n" + " url: '" + getUrl() + "',\n"
                + " states: " + sb.toString() + "\n" + " }\n";
        return ret;
    }

    /** Returns the bundle with the given name, or null if not present. */
    public Bundle getBundleByName(String bundleName) {
        for (Bundle bundle : this.bundles) {
            if (bundle.getName().equals(bundleName)) {
                return bundle;
            }
        }
        return null;
    }

    public List<Bundle> getBundles() {
        return this.bundles;
    }

    /** Stores the bundles, re-sequencing them if the 1-based ordering is broken. */
    public void setBundles(List<Bundle> bundles) {
        if (!checkSeqNumbers(bundles)) {
            resetSeqNumbers(bundles);
        }
        this.bundles = bundles;
    }

    /** Returns true if seq numbers are exactly 1..n in order (the class invariant). */
    private boolean checkSeqNumbers(List<Bundle> bundles) {
        if (bundles != null) {
            int expected = 1;
            for (Bundle bundle : bundles) {
                if (expected++ != bundle.getSeqNo()) {
                    return false;
                }
            }
        }
        return true;
    }

    /** Rewrites seq numbers to 1..n in list order. */
    private void resetSeqNumbers(List<Bundle> bundles) {
        if (bundles != null) {
            int seqNo = 1;
            for (Bundle bundle : bundles) {
                bundle.setSeqNo(seqNo++);
            }
        }
    }

    /** Appends a bundle, assigning the next seq number if the bundle has none (-1). */
    public void addBundle(final Bundle bundle) {
        if (bundle.getSeqNo() == -1) {
            // fix sequence number if not set
            if (this.bundles.size() == 0) {
                bundle.setSeqNo(1);
            } else {
                final int lastIndex = this.bundles.get(bundles.size() - 1).getSeqNo();
                bundle.setSeqNo(lastIndex + 1);
            }
        }
        this.bundles.add(bundle);
    }

    /**
     * Removes the named bundle and renumbers the remaining ones.
     * Bug fix: renumbering previously started at 0, breaking the 1-based
     * invariant that checkSeqNumbers/resetSeqNumbers/addBundle maintain.
     */
    public void removeBundle(final String bundleName) {
        final Bundle bundle = getBundleByName(bundleName);
        if (bundle == null) {
            return;
        }
        this.bundles.remove(bundle);
        int seqNo = 1;
        for (Bundle b : this.bundles) {
            b.setSeqNo(seqNo);
            seqNo++;
        }
    }

    /**
     * Reset bundle's segment number to be highest values (last bundle in loading)
     * @param bundleName bundle, which segment number must be highest
     */
    public void pushBundleLast(String bundleName) {
        if (this.bundles.isEmpty()) {
            return; // nothing to reorder; previously threw IndexOutOfBoundsException
        }
        final int lastIndex = this.bundles.get(bundles.size() - 1).getSeqNo();
        for (Bundle bundle : this.bundles) {
            if (bundle.getName().equals(bundleName)) {
                bundle.setSeqNo(lastIndex + 1);
            }
        }
    }

    /**
     * Skips id, oldId and uuid but clones the rest of the info. Bundles retain ids.
     * @return cloned object with bundles
     */
    public View cloneBasicInfo() {
        View view = new View();
        // skip id, oldId, uuid
        view.setName(getName());
        view.setDescription(getDescription());
        view.setType(getType());
        view.setDevelopmentPath(getDevelopmentPath());
        view.setApplication(getApplication());
        view.setIsPublic(isPublic());
        view.setLang(getLang());
        view.setPage(getPage());
        view.setPubDomain(getPubDomain());
        view.setIsDefault(isDefault());
        for (Bundle bundle : getBundles()) {
            view.addBundle(bundle.clone());
        }
        return view;
    }

    /** Returns the "mapOptions" config of the mapfull bundle, or null. */
    public JSONObject getMapOptions() {
        Bundle mapfull = getBundleByName("mapfull");
        if (mapfull == null) {
            return null;
        }
        JSONObject config = mapfull.getConfigJSON();
        return JSONHelper.getJSONObject(config, "mapOptions");
    }

    /** Returns the configured srsName from map options, or null. */
    public String getSrsName() {
        JSONObject mapOptions = getMapOptions();
        if (mapOptions == null) {
            return null;
        }
        return JSONHelper.getStringFromJSON(mapOptions, "srsName", null);
    }
}
package org.csstudio.alarm.diirt.datasource;

import java.net.URI;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.function.Consumer;
import java.util.logging.Logger;

import org.csstudio.alarm.beast.client.AlarmTreeItem;
import org.csstudio.alarm.beast.client.AlarmTreePV;
import org.csstudio.alarm.beast.ui.clientmodel.AlarmClientModel;
import org.csstudio.alarm.beast.ui.clientmodel.AlarmClientModelListener;
import org.diirt.datasource.ChannelHandler;
import org.diirt.datasource.DataSource;
import org.diirt.datasource.vtype.DataTypeSupport;

import com.thoughtworks.xstream.InitializationException;

/**
 * @author Kunal Shroff
 *
 */
public class BeastDataSource extends DataSource {

    private static final Logger log = Logger.getLogger(BeastDataSource.class.getName());

    private final BeastTypeSupport typeSupport;

    // The model, activeAlarms and acknowledgedAlarms is shared by the entire
    // datasource, the benefit of doing this at the datasource level instead of
    // in each channel is that they need to be computed only once and only a single
    // copy needs to be maintained.
    // NOTE(review): 'model' is assigned asynchronously in the constructor and
    // may be null until initialization completes.
    private AlarmClientModel model;

    // channel path/PV name -> consumers interested in its alarm state
    private Map<String, List<Consumer>> map =
            Collections.synchronizedMap(new HashMap<String, List<Consumer>>());
    private Executor executor = Executors.newScheduledThreadPool(4);

    private boolean serverTimeout = false;

    static {
        // Install type support for the types it generates.
        DataTypeSupport.install();
    }

    /**
     * Creates the datasource and kicks off asynchronous initialization of the
     * shared {@link AlarmClientModel}; the model listener re-syncs all open
     * channels on configuration changes, alarm state updates and timeouts.
     */
    public BeastDataSource(BeastDataSourceConfiguration configuration) {
        super(true);
        typeSupport = new BeastTypeSupport();

        try {
            // Create an instance to the AlarmClientModel (async; the returned
            // future is intentionally not awaited).
            CompletableFuture
                    .supplyAsync(() -> initialize(configuration), executor)
                    .thenAccept((model) -> {
                        this.model = model;
                        this.model.addListener(new AlarmClientModelListener() {

                            @Override
                            public void newAlarmConfiguration(AlarmClientModel model) {
                                log.config("beast datasource: new alarm configuration");
                                // Reconnect every open channel against the new configuration.
                                for (String channelName : map.keySet()) {
                                    BeastChannelHandler channel = (BeastChannelHandler) getChannels()
                                            .get(channelHandlerLookupName(channelName));
                                    channel.reconnect();
                                }
                            }

                            @Override
                            public void serverTimeout(AlarmClientModel model) {
                                log.warning("beast datasource: server timeout " + model.isServerAlive());
                                serverTimeout = true;
                                for (String channelName : map.keySet()) {
                                    BeastChannelHandler channel = (BeastChannelHandler) getChannels()
                                            .get(channelHandlerLookupName(channelName));
                                    channel.reconnect();
                                }
                            }

                            @Override
                            public void serverModeUpdate(AlarmClientModel model, boolean maintenance_mode) {
                                // Mode changes are logged only; channels do not care.
                                log.info("beast datasource: server mode update");
                            }

                            @SuppressWarnings({ "rawtypes", "unchecked" })
                            @Override
                            public void newAlarmState(AlarmClientModel alarmModel, AlarmTreePV pv,
                                    boolean parent_changed) {
                                log.config("beast datasource: new alarm state " + pv);
                                if (pv != null) {
                                    log.fine(pv.getPathName());
                                    // Notify consumers registered on the full path (leading '/' stripped).
                                    List<Consumer> pathHandlers = map.get(pv.getPathName().substring(1));
                                    if (pathHandlers != null) {
                                        for (Consumer consumer : pathHandlers) {
                                            consumer.accept(pv);
                                        }
                                    }
                                    // Notify consumers registered on the bare PV name.
                                    List<Consumer> pvHandlers = map.get(pv.getName());
                                    if (pvHandlers != null) {
                                        for (Consumer consumer : pvHandlers) {
                                            consumer.accept(pv);
                                        }
                                    }
                                    // Notify all parent nodes if parent changed
                                    if (parent_changed) {
                                        AlarmTreeItem parent = pv.getParent();
                                        while (parent != null) {
                                            List<Consumer> parentHandlers =
                                                    map.get(parent.getPathName().substring(1));
                                            if (parentHandlers != null) {
                                                for (Consumer consumer : parentHandlers) {
                                                    try {
                                                        consumer.accept(getState(parent.getPathName()));
                                                    } catch (Exception e) {
                                                        // best effort: a single failing parent
                                                        // lookup must not stop the notification loop
                                                    }
                                                }
                                            }
                                            parent = parent.getParent();
                                        }
                                    }
                                } else if (serverTimeout && alarmModel.isServerAlive()) {
                                    // The server has recovered from a disconnect event.
                                    // Attempt to reconnect all pv's
                                    serverTimeout = alarmModel.isServerAlive();
                                    for (String channelName : map.keySet()) {
                                        BeastChannelHandler channel = (BeastChannelHandler) getChannels()
                                                .get(channelHandlerLookupName(channelName));
                                        channel.reconnect();
                                    }
                                }
                            }
                        });
                    });
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Obtains the shared AlarmClientModel, optionally for a named configuration.
     * @return the model, or null when it could not be created
     */
    private AlarmClientModel initialize(BeastDataSourceConfiguration configuration) {
        AlarmClientModel alarmModel;
        try {
            if (configuration.getConfigName() != null && !configuration.getConfigName().isEmpty()) {
                alarmModel = AlarmClientModel.getInstance(configuration.getConfigName());
            } else {
                alarmModel = AlarmClientModel.getInstance();
            }
            return alarmModel;
        } catch (Exception e) {
            return null;
        }
    }

    @Override
    protected ChannelHandler createChannel(String channelName) {
        return new BeastChannelHandler(channelName, this);
    }

    /**
     * Releases the model on close.
     * Bug fix: 'model' is set asynchronously and may still be null here;
     * the original called model.release() unconditionally and could NPE.
     */
    @Override
    public void close() {
        super.close();
        if (model != null) {
            model.release();
        }
    }

    /*
     * (non-Javadoc) Override of default channelHandlerLookupName.
     * This implementation makes a leading and trailing "/" optional.
     * All four of these will resolve to the same channel:
     * "/demo/test/", "/demo/test", "demo/test/" & "demo/test".
     *
     * @see org.diirt.datasource.DataSource#channelHandlerLookupName(java.lang.String)
     */
    @Override
    protected String channelHandlerLookupName(String channelName) {
        String channel = channelName;
        if (channel != null && !channel.equals("/") && !channel.isEmpty()) {
            if (channel.endsWith("/"))
                channel = channel.substring(0, channel.length() - 1);
            if (channel.startsWith("/"))
                channel = channel.substring(1);
        }
        return channel;
    }

    public BeastTypeSupport getTypeSupport() {
        return typeSupport;
    }

    /** Registers a consumer for alarm updates on the given channel. */
    @SuppressWarnings("rawtypes")
    protected void add(String channelName, Consumer beastChannelHandler) {
        String beastChannel = channelHandlerLookupName(channelName);
        synchronized (map) {
            List<Consumer> list = map.get(beastChannel);
            if (list == null) {
                list = new ArrayList<Consumer>();
                map.put(beastChannel, list);
            }
            list.add(beastChannelHandler);
        }
    }

    /** Unregisters a consumer previously registered via {@link #add}. */
    @SuppressWarnings("rawtypes")
    protected void remove(String channelName, Consumer beastChannelHandler) {
        String beastChannel = channelHandlerLookupName(channelName);
        synchronized (map) {
            if (map.containsKey(beastChannel)) {
                map.get(beastChannel).remove(beastChannelHandler);
            }
        }
    }

    /**
     * Resolves a channel name to its alarm tree node: first as a bare PV
     * name, then as a full tree path.
     * @throws InitializationException when the model is not yet available
     */
    protected AlarmTreeItem getState(String channelName) throws Exception {
        URI uri = URI.create(URLEncoder.encode(channelName, "UTF-8"));
        String pvName = uri.getPath().substring(uri.getPath().lastIndexOf("/") + 1);
        if (model != null) {
            AlarmTreePV alarmTreePV = model.findPV(pvName);
            if (alarmTreePV != null) {
                return alarmTreePV;
            } else {
                String path = URLDecoder.decode(uri.getPath(), "UTF-8");
                AlarmTreeItem alarmTreeItem = model.getConfigTree().getItemByPath(path);
                return alarmTreeItem;
            }
        } else {
            throw new InitializationException("Model hasn't been created yet");
        }
    }

    protected boolean isConnected() {
        if (model != null) {
            return model.isServerAlive();
        } else {
            return false;
        }
    }

    protected boolean isWriteAllowed() {
        if (model != null) {
            return model.isServerAlive() && model.isWriteAllowed();
        } else {
            return false;
        }
    }

    protected void acknowledge(String channelName, boolean acknowledge) throws Exception {
        getState(channelName).acknowledge(acknowledge);
    }

    // implementing the enable disable mechanism using the example of the
    // DisableComponentAction
    /**
     * Enables or disables all PVs under the given channel, asynchronously.
     * Bug fix: on failure the original constructed an Exception that was
     * never thrown or logged; failures are now logged.
     */
    protected void enable(String channelName, boolean enable) throws Exception {
        AlarmTreeItem item = getState(channelName);
        List<AlarmTreePV> pvs = new ArrayList<AlarmTreePV>();
        CompletableFuture
                .runAsync(() -> addPVs(pvs, item, enable), executor)
                .thenRun(() -> {
                    for (AlarmTreePV alarmTreePV : pvs) {
                        try {
                            model.enable(alarmTreePV, enable);
                        } catch (Exception e) {
                            log.warning("Failed to enable/disable : " + alarmTreePV.getName());
                            e.printStackTrace();
                        }
                    }
                });
    }

    /**
     * @param pvs
     *            List where PVs to enable/disable will be added
     * @param item
     *            Item for which to locate PVs, recursively
     */
    protected void addPVs(final List<AlarmTreePV> pvs, final AlarmTreeItem item, boolean enable) {
        if (item instanceof AlarmTreePV) {
            final AlarmTreePV pv = (AlarmTreePV) item;
            if (pv.isEnabled() != enable)
                pvs.add(pv);
        } else {
            final int N = item.getChildCount();
            for (int i = 0; i < N; ++i)
                addPVs(pvs, item.getChild(i), enable);
        }
    }
}
package beast.util; import java.text.DecimalFormat; import java.text.DecimalFormatSymbols; import java.util.Arrays; import java.util.Comparator; import java.util.Locale; import java.util.PriorityQueue; import java.util.List; import java.util.ArrayList; import beast.core.Description; import beast.core.Input; import beast.core.StateNode; import beast.core.StateNodeInitialiser; import beast.core.Input.Validate; import beast.evolution.alignment.Alignment; import beast.evolution.alignment.TaxonSet; import beast.evolution.alignment.distance.Distance; import beast.evolution.alignment.distance.JukesCantorDistance; import beast.evolution.tree.Node; import beast.evolution.tree.Tree; /** * Adapted from Weka's HierarchicalClustering class * */ @Description("Create initial beast.tree by hierarchical clustering, either through one of the classic link methods " + "or by neighbor joining. The following link methods are supported: " + "<br/>o single link, " + "<br/>o complete link, " + "<br/>o UPGMA=average link, " + "<br/>o mean link, " + "<br/>o centroid, " + "<br/>o Ward and " + "<br/>o adjusted complete link " + "<br/>o neighborjoining " + "<br/>o neighborjoining2 - corrects tree for tip data, unlike plain neighborjoining") public class ClusterTree extends Tree implements StateNodeInitialiser { final static String M_SINGLE = "single"; final static String M_AVERAGE = "average"; final static String M_COMPLETE = "complete"; final static String M_UPGMA = "upgma"; final static String M_MEAN = "mean"; final static String M_CENTROID = "centroid"; final static String M_WARD = "ward"; final static String M_ADJCOMPLETE = "adjcomplete"; final static String M_NEIGHBORJOINING = "neighborjoining"; final static String M_NEIGHBORJOINING2 = "neighborjoining2"; double EPSILON = 1e-10; final static String[] TYPES = {M_SINGLE, M_AVERAGE, M_COMPLETE, M_UPGMA, M_MEAN, M_CENTROID, M_WARD, M_ADJCOMPLETE, M_NEIGHBORJOINING, M_NEIGHBORJOINING2}; public Input<String> clusterTypeInput = new 
Input<String>("clusterType", "type of clustering algorithm used for generating initial beast.tree. " + "Should be one of " + Arrays.toString(TYPES) + " (default " + M_AVERAGE + ")", M_AVERAGE, TYPES); public Input<Alignment> dataInput = new Input<Alignment>("taxa", "alignment data used for calculating distances for clustering"); //public Input<TaxonSet> taxonSetInput = new Input<TaxonSet>("taxonset", "specifies taxon set in same order as used for distance", Validate.XOR, dataInput); public Input<Distance> distanceInput = new Input<Distance>("distance", "method for calculating distance between two sequences (default Jukes Cantor)"); /** * Whether the distance represent node height (if false) or branch length (if true). */ protected boolean distanceIsBranchLength = false; Distance distance; List<String> taxaNames; @Override public void initAndValidate() throws Exception { if (dataInput.get() != null) { taxaNames = dataInput.get().getTaxaNames(); } else { if (m_taxonset.get() == null) { throw new Exception("At least one of taxa and taxonset input needs to be specified"); } taxaNames = m_taxonset.get().asStringList(); } if (Boolean.valueOf(System.getProperty("beast.resume")) && (isEstimatedInput.get() || (m_initial.get() != null && m_initial.get().isEstimatedInput.get()))) { // don't bother creating a cluster tree to save some time, if it is read from file anyway // make a caterpillar Node left = newNode(); left.setNr(0); left.setID(taxaNames.get(0)); left.setHeight(0); for (int i = 1; i < taxaNames.size(); i++) { Node right = newNode(); right.setNr(i); right.setID(taxaNames.get(i)); right.setHeight(0); Node parent = newNode(); parent.setNr(taxaNames.size() + i - 1); parent.setHeight(i); left.setParent(parent); parent.setLeft(left); right.setParent(parent); parent.setRight(right); left = parent; } root = left; leafNodeCount = taxaNames.size(); nodeCount = leafNodeCount * 2 - 1; internalNodeCount = leafNodeCount - 1; super.initAndValidate(); return; } distance = 
distanceInput.get(); if (distance == null) { distance = new JukesCantorDistance(); } if (distance instanceof Distance.Base){ if (dataInput.get() == null) { // Distance requires an alignment? } ((Distance.Base) distance).setPatterns(dataInput.get()); } String sType = clusterTypeInput.get().toLowerCase(); if (sType.equals(M_SINGLE)) { m_nLinkType = SINGLE; } else if (sType.equals(M_COMPLETE)) { m_nLinkType = COMPLETE; } else if (sType.equals(M_AVERAGE)) { m_nLinkType = AVERAGE; } else if (sType.equals(M_UPGMA)) { m_nLinkType = AVERAGE; } else if (sType.equals(M_MEAN)) { m_nLinkType = MEAN; } else if (sType.equals(M_CENTROID)) { m_nLinkType = CENTROID; } else if (sType.equals(M_WARD)) { m_nLinkType = WARD; } else if (sType.equals(M_ADJCOMPLETE)) { m_nLinkType = ADJCOMLPETE; } else if (sType.equals(M_NEIGHBORJOINING)) { m_nLinkType = NEIGHBOR_JOINING; distanceIsBranchLength = true; } else if (sType.equals(M_NEIGHBORJOINING2)) { m_nLinkType = NEIGHBOR_JOINING2; distanceIsBranchLength = true; } else { System.out.println("Warning: unrecognized cluster type. 
Using Average/UPGMA."); m_nLinkType = AVERAGE; } Node root = buildClusterer(); setRoot(root); root.labelInternalNodes((getNodeCount() + 1) / 2); super.initAndValidate(); if (m_nLinkType == NEIGHBOR_JOINING2) { // set tip dates to zero Node[] nodes = getNodesAsArray(); for (int i = 0; i < getLeafNodeCount(); i++) { nodes[i].setHeight(0); } super.initAndValidate(); } if (m_initial.get() != null && m_initial.get().m_trait.get() != null) { adjustTreeToNodeHeights(root, m_initial.get().m_trait.get()); } else if (m_trait.get() == null) { // all nodes should be at zero height if no date-trait is available for (int i = 0; i < getLeafNodeCount(); i++) { getNode(i).setHeight(0); } } initStateNodes(); } /** * the various link types */ final static int SINGLE = 0; final static int COMPLETE = 1; final static int AVERAGE = 2; final static int MEAN = 3; final static int CENTROID = 4; final static int WARD = 5; final static int ADJCOMLPETE = 6; final static int NEIGHBOR_JOINING = 7; final static int NEIGHBOR_JOINING2 = 8; /** * Holds the Link type used calculate distance between clusters */ int m_nLinkType = SINGLE; public ClusterTree() { } // c'tor /** * class representing node in cluster hierarchy * */ class NodeX { NodeX m_left; NodeX m_right; NodeX m_parent; int m_iLeftInstance; int m_iRightInstance; double m_fLeftLength = 0; double m_fRightLength = 0; double m_fHeight = 0; void setHeight(double fHeight1, double fHeight2) { if (fHeight1 < EPSILON) { fHeight1 = EPSILON; } if (fHeight2 < EPSILON) { fHeight2 = EPSILON; } m_fHeight = fHeight1; if (m_left == null) { m_fLeftLength = fHeight1; } else { m_fLeftLength = fHeight1 - m_left.m_fHeight; } if (m_right == null) { m_fRightLength = fHeight2; } else { m_fRightLength = fHeight2 - m_right.m_fHeight; } } void setLength(double fLength1, double fLength2) { if (fLength1 < EPSILON) { fLength1 = EPSILON; } if (fLength2 < EPSILON) { fLength2 = EPSILON; } m_fLeftLength = fLength1; m_fRightLength = fLength2; m_fHeight = fLength1; if 
(m_left != null) { m_fHeight += m_left.m_fHeight; } } public String toString() { DecimalFormat myFormatter = new DecimalFormat(" if (m_left == null) { if (m_right == null) { return "(" + taxaNames.get(m_iLeftInstance) + ":" + myFormatter.format(m_fLeftLength) + "," + taxaNames.get(m_iRightInstance) + ":" + myFormatter.format(m_fRightLength) + ")"; } else { return "(" + taxaNames.get(m_iLeftInstance) + ":" + myFormatter.format(m_fLeftLength) + "," + m_right.toString() + ":" + myFormatter.format(m_fRightLength) + ")"; } } else { if (m_right == null) { return "(" + m_left.toString() + ":" + myFormatter.format(m_fLeftLength) + "," + taxaNames.get(m_iRightInstance) + ":" + myFormatter.format(m_fRightLength) + ")"; } else { return "(" + m_left.toString() + ":" + myFormatter.format(m_fLeftLength) + "," + m_right.toString() + ":" + myFormatter.format(m_fRightLength) + ")"; } } } Node toNode() throws Exception { Node node = newNode(); node.setHeight(m_fHeight); if (m_left == null) { node.setLeft(newNode()); node.getLeft().setNr(m_iLeftInstance); node.getLeft().setID(taxaNames.get(m_iLeftInstance)); node.getLeft().setHeight(m_fHeight - m_fLeftLength); if (m_right == null) { node.setRight(newNode()); node.getRight().setNr(m_iRightInstance); node.getRight().setID(taxaNames.get(m_iRightInstance)); node.getRight().setHeight(m_fHeight - m_fRightLength); } else { node.setRight(m_right.toNode()); } } else { node.setLeft(m_left.toNode()); if (m_right == null) { node.setRight(newNode()); node.getRight().setNr(m_iRightInstance); node.getRight().setID(taxaNames.get(m_iRightInstance)); node.getRight().setHeight(m_fHeight - m_fRightLength); } else { node.setRight(m_right.toNode()); } } if (node.getHeight() < node.getLeft().getHeight() + EPSILON) { node.setHeight(node.getLeft().getHeight() + EPSILON); } if (node.getHeight() < node.getRight().getHeight() + EPSILON) { node.setHeight(node.getRight().getHeight() + EPSILON); } node.getRight().setParent(node); node.getLeft().setParent(node); 
return node; } } // class NodeX /** * used for priority queue for efficient retrieval of pair of clusters to merge* */ class Tuple { public Tuple(double d, int i, int j, int nSize1, int nSize2) { m_fDist = d; m_iCluster1 = i; m_iCluster2 = j; m_nClusterSize1 = nSize1; m_nClusterSize2 = nSize2; } double m_fDist; int m_iCluster1; int m_iCluster2; int m_nClusterSize1; int m_nClusterSize2; } /** * comparator used by priority queue* */ class TupleComparator implements Comparator<Tuple> { public int compare(Tuple o1, Tuple o2) { if (o1.m_fDist < o2.m_fDist) { return -1; } else if (o1.m_fDist == o2.m_fDist) { return 0; } return 1; } } // return distance according to distance metric double distance(int iTaxon1, int iTaxon2) { return distance.pairwiseDistance(iTaxon1, iTaxon2); } // distance // 1-norm double distance(double[] nPattern1, double[] nPattern2) { double fDist = 0; for (int i = 0; i < dataInput.get().getPatternCount(); i++) { fDist += dataInput.get().getPatternWeight(i) * Math.abs(nPattern1[i] - nPattern2[i]); } return fDist / dataInput.get().getSiteCount(); } @SuppressWarnings("unchecked") public Node buildClusterer() throws Exception { int nTaxa = taxaNames.size(); if (nTaxa == 1) { // patalogical case Node node = newNode(); node.setHeight(1); node.setNr(0); return node; } // use array of integer vectors to store cluster indices, // starting with one cluster per instance List<Integer>[] nClusterID = new ArrayList[nTaxa]; for (int i = 0; i < nTaxa; i++) { nClusterID[i] = new ArrayList<Integer>(); nClusterID[i].add(i); } // calculate distance matrix int nClusters = nTaxa; // used for keeping track of hierarchy NodeX[] clusterNodes = new NodeX[nTaxa]; if (m_nLinkType == NEIGHBOR_JOINING || m_nLinkType == NEIGHBOR_JOINING2) { neighborJoining(nClusters, nClusterID, clusterNodes); } else { doLinkClustering(nClusters, nClusterID, clusterNodes); } // move all clusters in m_nClusterID array // & collect hierarchy for (int i = 0; i < nTaxa; i++) { if 
(nClusterID[i].size() > 0) { return clusterNodes[i].toNode(); } } return null; } // buildClusterer /** * use neighbor joining algorithm for clustering * This is roughly based on the RapidNJ simple implementation and runs at O(n^3) * More efficient implementations exist, see RapidNJ (or my GPU implementation :-)) * * @param nClusters * @param nClusterID * @param clusterNodes */ void neighborJoining(int nClusters, List<Integer>[] nClusterID, NodeX[] clusterNodes) { int n = taxaNames.size(); double[][] fDist = new double[nClusters][nClusters]; for (int i = 0; i < nClusters; i++) { fDist[i][i] = 0; for (int j = i + 1; j < nClusters; j++) { fDist[i][j] = getDistance0(nClusterID[i], nClusterID[j]); fDist[j][i] = fDist[i][j]; } } double[] fSeparationSums = new double[n]; double[] fSeparations = new double[n]; int[] nNextActive = new int[n]; //calculate initial separation rows for (int i = 0; i < n; i++) { double fSum = 0; for (int j = 0; j < n; j++) { fSum += fDist[i][j]; } fSeparationSums[i] = fSum; fSeparations[i] = fSum / (nClusters - 2); nNextActive[i] = i + 1; } while (nClusters > 2) { // find minimum int iMin1 = -1; int iMin2 = -1; double fMin = Double.MAX_VALUE; { int i = 0; while (i < n) { double fSep1 = fSeparations[i]; double[] fRow = fDist[i]; int j = nNextActive[i]; while (j < n) { double fSep2 = fSeparations[j]; double fVal = fRow[j] - fSep1 - fSep2; if (fVal < fMin) { // new minimum iMin1 = i; iMin2 = j; fMin = fVal; } j = nNextActive[j]; } i = nNextActive[i]; } } // record distance double fMinDistance = fDist[iMin1][iMin2]; nClusters double fSep1 = fSeparations[iMin1]; double fSep2 = fSeparations[iMin2]; double fDist1 = (0.5 * fMinDistance) + (0.5 * (fSep1 - fSep2)); double fDist2 = (0.5 * fMinDistance) + (0.5 * (fSep2 - fSep1)); if (nClusters > 2) { // update separations & distance double fNewSeparationSum = 0; double fMutualDistance = fDist[iMin1][iMin2]; double[] fRow1 = fDist[iMin1]; double[] fRow2 = fDist[iMin2]; for (int i = 0; i < n; i++) { if (i == 
iMin1 || i == iMin2 || nClusterID[i].size() == 0) { fRow1[i] = 0; } else { double fVal1 = fRow1[i]; double fVal2 = fRow2[i]; double fDistance = (fVal1 + fVal2 - fMutualDistance) / 2.0; fNewSeparationSum += fDistance; // update the separationsum of cluster i. fSeparationSums[i] += (fDistance - fVal1 - fVal2); fSeparations[i] = fSeparationSums[i] / (nClusters - 2); fRow1[i] = fDistance; fDist[i][iMin1] = fDistance; } } fSeparationSums[iMin1] = fNewSeparationSum; fSeparations[iMin1] = fNewSeparationSum / (nClusters - 2); fSeparationSums[iMin2] = 0; merge(iMin1, iMin2, fDist1, fDist2, nClusterID, clusterNodes); int iPrev = iMin2; // since iMin1 < iMin2 we havenActiveRows[0] >= 0, so the next loop should be save while (nClusterID[iPrev].size() == 0) { iPrev } nNextActive[iPrev] = nNextActive[iMin2]; } else { merge(iMin1, iMin2, fDist1, fDist2, nClusterID, clusterNodes); break; } } for (int i = 0; i < n; i++) { if (nClusterID[i].size() > 0) { for (int j = i + 1; j < n; j++) { if (nClusterID[j].size() > 0) { double fDist1 = fDist[i][j]; if (nClusterID[i].size() == 1) { merge(i, j, fDist1, 0, nClusterID, clusterNodes); } else if (nClusterID[j].size() == 1) { merge(i, j, 0, fDist1, nClusterID, clusterNodes); } else { merge(i, j, fDist1 / 2.0, fDist1 / 2.0, nClusterID, clusterNodes); } break; } } } } } // neighborJoining /** * Perform clustering using a link method * This implementation uses a priority queue resulting in a O(n^2 log(n)) algorithm * * @param nClusters number of clusters * @param nClusterID * @param clusterNodes */ void doLinkClustering(int nClusters, List<Integer>[] nClusterID, NodeX[] clusterNodes) { int nInstances = taxaNames.size(); PriorityQueue<Tuple> queue = new PriorityQueue<Tuple>(nClusters * nClusters / 2, new TupleComparator()); double[][] fDistance0 = new double[nClusters][nClusters]; for (int i = 0; i < nClusters; i++) { fDistance0[i][i] = 0; for (int j = i + 1; j < nClusters; j++) { fDistance0[i][j] = getDistance0(nClusterID[i], nClusterID[j]); 
fDistance0[j][i] = fDistance0[i][j]; queue.add(new Tuple(fDistance0[i][j], i, j, 1, 1)); } } while (nClusters > 1) { int iMin1 = -1; int iMin2 = -1; // use priority queue to find next best pair to cluster Tuple t; do { t = queue.poll(); } while (t != null && (nClusterID[t.m_iCluster1].size() != t.m_nClusterSize1 || nClusterID[t.m_iCluster2].size() != t.m_nClusterSize2)); iMin1 = t.m_iCluster1; iMin2 = t.m_iCluster2; merge(iMin1, iMin2, t.m_fDist, t.m_fDist, nClusterID, clusterNodes); // merge clusters // update distances & queue for (int i = 0; i < nInstances; i++) { if (i != iMin1 && nClusterID[i].size() != 0) { int i1 = Math.min(iMin1, i); int i2 = Math.max(iMin1, i); double fDistance = getDistance(fDistance0, nClusterID[i1], nClusterID[i2]); queue.add(new Tuple(fDistance, i1, i2, nClusterID[i1].size(), nClusterID[i2].size())); } } nClusters } } // doLinkClustering void merge(int iMin1, int iMin2, double fDist1, double fDist2, List<Integer>[] nClusterID, NodeX[] clusterNodes) { if (iMin1 > iMin2) { int h = iMin1; iMin1 = iMin2; iMin2 = h; double f = fDist1; fDist1 = fDist2; fDist2 = f; } nClusterID[iMin1].addAll(nClusterID[iMin2]); //nClusterID[iMin2].removeAllElements(); nClusterID[iMin2].removeAll(nClusterID[iMin2]); // track hierarchy NodeX node = new NodeX(); if (clusterNodes[iMin1] == null) { node.m_iLeftInstance = iMin1; } else { node.m_left = clusterNodes[iMin1]; clusterNodes[iMin1].m_parent = node; } if (clusterNodes[iMin2] == null) { node.m_iRightInstance = iMin2; } else { node.m_right = clusterNodes[iMin2]; clusterNodes[iMin2].m_parent = node; } if (distanceIsBranchLength) { node.setLength(fDist1, fDist2); } else { node.setHeight(fDist1, fDist2); } clusterNodes[iMin1] = node; } // merge /** * calculate distance the first time when setting up the distance matrix * */ double getDistance0(List<Integer> cluster1, List<Integer> cluster2) { double fBestDist = Double.MAX_VALUE; switch (m_nLinkType) { case SINGLE: case NEIGHBOR_JOINING: case NEIGHBOR_JOINING2: 
case CENTROID: case COMPLETE: case ADJCOMLPETE: case AVERAGE: case MEAN: // set up two instances for distance function fBestDist = distance(cluster1.get(0), cluster2.get(0)); break; case WARD: { // finds the distance of the change in caused by merging the cluster. // The information of a cluster is calculated as the error sum of squares of the // centroids of the cluster and its members. double ESS1 = calcESS(cluster1); double ESS2 = calcESS(cluster2); List<Integer> merged = new ArrayList<Integer>(); merged.addAll(cluster1); merged.addAll(cluster2); double ESS = calcESS(merged); fBestDist = ESS * merged.size() - ESS1 * cluster1.size() - ESS2 * cluster2.size(); } break; } return fBestDist; } // getDistance0 /** * calculate the distance between two clusters * * @param cluster1 list of indices of instances in the first cluster * @param cluster2 dito for second cluster * @return distance between clusters based on link type */ double getDistance(double[][] fDistance, List<Integer> cluster1, List<Integer> cluster2) { double fBestDist = Double.MAX_VALUE; switch (m_nLinkType) { case SINGLE: // find single link distance aka minimum link, which is the closest distance between // any item in cluster1 and any item in cluster2 fBestDist = Double.MAX_VALUE; for (int i = 0; i < cluster1.size(); i++) { int i1 = cluster1.get(i); for (int j = 0; j < cluster2.size(); j++) { int i2 = cluster2.get(j); double fDist = fDistance[i1][i2]; if (fBestDist > fDist) { fBestDist = fDist; } } } break; case COMPLETE: case ADJCOMLPETE: // find complete link distance aka maximum link, which is the largest distance between // any item in cluster1 and any item in cluster2 fBestDist = 0; for (int i = 0; i < cluster1.size(); i++) { int i1 = cluster1.get(i); for (int j = 0; j < cluster2.size(); j++) { int i2 = cluster2.get(j); double fDist = fDistance[i1][i2]; if (fBestDist < fDist) { fBestDist = fDist; } } } if (m_nLinkType == COMPLETE) { break; } // calculate adjustment, which is the largest within 
cluster distance double fMaxDist = 0; for (int i = 0; i < cluster1.size(); i++) { int i1 = cluster1.get(i); for (int j = i + 1; j < cluster1.size(); j++) { int i2 = cluster1.get(j); double fDist = fDistance[i1][i2]; if (fMaxDist < fDist) { fMaxDist = fDist; } } } for (int i = 0; i < cluster2.size(); i++) { int i1 = cluster2.get(i); for (int j = i + 1; j < cluster2.size(); j++) { int i2 = cluster2.get(j); double fDist = fDistance[i1][i2]; if (fMaxDist < fDist) { fMaxDist = fDist; } } } fBestDist -= fMaxDist; break; case AVERAGE: // finds average distance between the elements of the two clusters fBestDist = 0; for (int i = 0; i < cluster1.size(); i++) { int i1 = cluster1.get(i); for (int j = 0; j < cluster2.size(); j++) { int i2 = cluster2.get(j); fBestDist += fDistance[i1][i2]; } } fBestDist /= (cluster1.size() * cluster2.size()); break; case MEAN: { // calculates the mean distance of a merged cluster (akak Group-average agglomerative clustering) List<Integer> merged = new ArrayList<Integer>(); merged.addAll(cluster1); merged.addAll(cluster2); fBestDist = 0; for (int i = 0; i < merged.size(); i++) { int i1 = merged.get(i); for (int j = i + 1; j < merged.size(); j++) { int i2 = merged.get(j); fBestDist += fDistance[i1][i2]; } } int n = merged.size(); fBestDist /= (n * (n - 1.0) / 2.0); } break; case CENTROID: // finds the distance of the centroids of the clusters int nPatterns = dataInput.get().getPatternCount(); double[] centroid1 = new double[nPatterns]; for (int i = 0; i < cluster1.size(); i++) { int iTaxon = cluster1.get(i); for (int j = 0; j < nPatterns; j++) { centroid1[j] += dataInput.get().getPattern(iTaxon, j); } } double[] centroid2 = new double[nPatterns]; for (int i = 0; i < cluster2.size(); i++) { int iTaxon = cluster2.get(i); for (int j = 0; j < nPatterns; j++) { centroid2[j] += dataInput.get().getPattern(iTaxon, j); } } for (int j = 0; j < nPatterns; j++) { centroid1[j] /= cluster1.size(); centroid2[j] /= cluster2.size(); } fBestDist = 
distance(centroid1, centroid2); break; case WARD: { // finds the distance of the change in caused by merging the cluster. // The information of a cluster is calculated as the error sum of squares of the // centroids of the cluster and its members. double ESS1 = calcESS(cluster1); double ESS2 = calcESS(cluster2); List<Integer> merged = new ArrayList<Integer>(); merged.addAll(cluster1); merged.addAll(cluster2); double ESS = calcESS(merged); fBestDist = ESS * merged.size() - ESS1 * cluster1.size() - ESS2 * cluster2.size(); } break; } return fBestDist; } // getDistance /** * calculated error sum-of-squares for instances wrt centroid * */ double calcESS(List<Integer> cluster) { int nPatterns = dataInput.get().getPatternCount(); double[] centroid = new double[nPatterns]; for (int i = 0; i < cluster.size(); i++) { int iTaxon = cluster.get(i); for (int j = 0; j < nPatterns; j++) { centroid[j] += dataInput.get().getPattern(iTaxon, j); } } for (int j = 0; j < nPatterns; j++) { centroid[j] /= cluster.size(); } // set up two instances for distance function double fESS = 0; for (int i = 0; i < cluster.size(); i++) { double[] instance = new double[nPatterns]; int iTaxon = cluster.get(i); for (int j = 0; j < nPatterns; j++) { instance[j] += dataInput.get().getPattern(iTaxon, j); } fESS += distance(centroid, instance); } return fESS / cluster.size(); } // calcESS @Override public void initStateNodes() { if (m_initial.get() != null) { m_initial.get().assignFromWithoutID(this); } } @Override public List<StateNode> getInitialisedStateNodes() { List<StateNode> stateNodes = new ArrayList<StateNode>(); if (m_initial.get() != null) { stateNodes.add(m_initial.get()); } return stateNodes; } } // class ClusterTree
package stormpot.benchmark;

import java.util.concurrent.atomic.AtomicInteger;

import com.google.caliper.SimpleBenchmark;

/**
 * Micro-benchmark comparing strategies for selecting a partition index:
 * a fixed random int, the thread id, an AtomicInteger counter, a plain
 * (racy) counter, a ThreadLocal reference, and identity hash codes.
 * All methods follow the Caliper convention: repeat {@code reps} times
 * and fold the result into an int so the JIT cannot eliminate the work.
 */
public class PartitionSelect extends SimpleBenchmark {
    private static final int partitions = 10;
    // arbitrary seed value for the fold accumulator
    private static final int base = -358431684;
    // intentionally racy counter; see timeModuloScalableCounter's caveat
    private static volatile int counter;
    private static final ThreadLocal<Integer> tlsInt = new ThreadLocal<Integer>();
    private static int randInt;

    @Override
    protected void setUp() {
        counter = 0;
        tlsInt.set(Integer.valueOf(1));
        // BUGFIX: the original `(int) Math.random()` truncates a value in
        // [0.0, 1.0) and is therefore always 0, making timeModuloConst
        // degenerate. Scale into the int range before truncating.
        randInt = (int) (Math.random() * Integer.MAX_VALUE);
    }

    /** Baseline: modulo of a fixed pre-computed random int. */
    public int timeModuloConst(int reps) {
        int result = base;
        for (int i = 0; i < reps; i++) {
            result ^= result + randInt % partitions;
        }
        return result;
    }

    /** Modulo of the current thread's id. */
    public int timeModuloThreadId(int reps) {
        int result = base;
        for (int i = 0; i < reps; i++) {
            result ^= result + Thread.currentThread().getId() % partitions;
        }
        return result;
    }

    /*
     * The AtomicInteger based tests are flawed in that the CASes are never
     * contended. So the increments never see a CAS failure.
     */
    public int timeModuloAtomicCount(int reps) {
        AtomicInteger counter = new AtomicInteger();
        int result = base;
        for (int i = 0; i < reps; i++) {
            result ^= result + counter.incrementAndGet() % partitions;
        }
        return result;
    }

    /*
     * The Scalable Counter based tests are flawed in that there is never any
     * contention. So the result is always accurate, and the cache-line never
     * have to do a hand-over, so it will remain exclusive to the current CPU
     * core. The x86 CPUs knows how to do this fast, presumably.
     */
    public int timeModuloScalableCounter(int reps) {
        int result = base;
        for (int i = 0; i < reps; i++) {
            result ^= result + (++counter) % partitions;
        }
        return result;
    }

    /** Cost of a ThreadLocal read (no modulo). */
    public int timeThreadLocalRef(int reps) {
        int result = base;
        for (int i = 0; i < reps; i++) {
            result ^= result + tlsInt.get().intValue();
        }
        return result;
    }

    /** Modulo of the current thread's identity hash. */
    public int timeModuloIdentityHashThread(int reps) {
        int result = base;
        for (int i = 0; i < reps; i++) {
            result ^= result + System.identityHashCode(Thread.currentThread()) % partitions;
        }
        return result;
    }

    /** Modulo of a fresh object's identity hash (includes allocation cost). */
    public int timeModuloIdentityHashNewObject(int reps) {
        int result = base;
        for (int i = 0; i < reps; i++) {
            result ^= result + System.identityHashCode(new Object()) % partitions;
        }
        return result;
    }
}
//FILE: LiveModeTimer.java
//PROJECT: Micro-Manager
//SUBSYSTEM: mmstudio
// This file is distributed in the hope that it will be useful,
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES.

package org.micromanager.acquisition;

import java.text.NumberFormat;
import java.util.HashSet;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.LinkedBlockingQueue;
import mmcorej.CMMCore;
import mmcorej.TaggedImage;
import org.micromanager.MMStudioMainFrame;
import org.micromanager.utils.CanvasPaintPending;
import org.micromanager.utils.ReportingUtils;

/**
 * This class extends the java swing timer. It periodically retrieves images
 * from the core and displays them in the live window
 *
 * @author Henry Pinkard
 */
public class LiveModeTimer {

   private static final String ACQ_NAME = MMStudioMainFrame.SIMPLE_ACQ;
   // Display window for the simple (live) acquisition; created lazily in begin().
   private VirtualAcquisitionDisplay win_;
   private CMMCore core_;
   private MMStudioMainFrame gui_;
   // Number of simultaneous channels reported by the camera (see setType()).
   private int multiChannelCameraNrCh_;
   // Timestamp of the last fps-report; compared against fpsInterval_.
   private long fpsTimer_;
   // Images displayed since the last fps-report.
   private long fpsCounter_;
   // Last "ImageNumber" tag seen from the circular buffer.
   private long imageNumber_;
   private long oldImageNumber_;
   // Minimum milliseconds between fps-report updates (recomputed in getInterval()).
   private long fpsInterval_ = 5000;
   private final NumberFormat format_;
   private boolean running_ = false;
   private Timer timer_;
   private TimerTask task_;
   private MMStudioMainFrame.DisplayImageRoutine displayImageRoutine_;
   private LinkedBlockingQueue imageQueue_;
   // Counts images within one multi-camera group; static, shared by all instances.
   private static int mCamImageCounter_ = 0;
   private boolean multiCam_ = false;

   /**
    * Wires up references to the GUI and core, and installs the routine that
    * the display thread uses to show each tagged image pulled off imageQueue_.
    */
   public LiveModeTimer() {
      gui_ = MMStudioMainFrame.getInstance();
      core_ = gui_.getCore();
      format_ = NumberFormat.getInstance();
      // Show at most one fractional digit in the fps display.
      format_.setMaximumFractionDigits(0x1);
      mCamImageCounter_ = 0;
      displayImageRoutine_ = new MMStudioMainFrame.DisplayImageRoutine() {
         public void show(final TaggedImage ti) {
            try {
               // The multiCamLiveTask needs synchronization at this point
               // The multiCamLiveTask generates tagged images in groups of
               // multiChannelCameraNrCh_, however, we only want to update
               // the display (which is costly) when we have the whole group
               if (multiCam_) {
                  mCamImageCounter_++;
                  if (mCamImageCounter_ < multiChannelCameraNrCh_) {
                     // Not the last image of the group: store without updating display.
                     gui_.normalizeTags(ti);
                     gui_.addImage(ACQ_NAME, ti, false, false);
                     return;
                  } else {
                     // completes the set
                     mCamImageCounter_ = 0;
                  }
               }
               // Skip the (costly) repaint if one is already pending for this canvas.
               if (!CanvasPaintPending.isMyPaintPending(
                       gui_.getImageWin().getCanvas(), this) ) {
                  CanvasPaintPending.setPaintPending(
                          gui_.getImageWin().getCanvas(), this);
                  gui_.normalizeTags(ti);
                  gui_.addImage(ACQ_NAME, ti, true, true);
                  gui_.updateLineProfile();
               }
            } catch (Exception e) {
               ReportingUtils.logError(e);
            }
         }
      };
   }

   /**
    * Determines the optimum interval for the live mode timer task to happen
    * As a side effect, also sets variable fpsInterval_
    */
   private long getInterval() {
      // Poll at least every 20 ms, or once per exposure if that is longer.
      double interval = 20;
      try {
         interval = Math.max(core_.getExposure(), interval);
      } catch (Exception e) {
         ReportingUtils.logError("Unable to get exposure from core");
      }
      // Report fps roughly every 20 poll intervals, but no more than once per second.
      fpsInterval_ = (long) (20 * interval);
      if (fpsInterval_ < 1000)
         fpsInterval_ = 1000;
      // NOTE(review): narrowing to int although the method returns long —
      // harmless for realistic exposures but looks unintended; confirm.
      return (int) interval;
   }

   /**
    * Determines whether we are dealing with multiple cameras
    */
   private void setType() {
      multiChannelCameraNrCh_ = (int) core_.getNumberOfCameraChannels();
      if (multiChannelCameraNrCh_ == 1) {
         task_ = singleCameraLiveTask();
         multiCam_ = false;
      } else {
         task_ = multiCamLiveTask();
         multiCam_ = true;
      }
   }

   /** @return whether live mode is currently running. */
   public boolean isRunning() {
      return running_;
   }

   /**
    * Starts live mode: begins a continuous sequence acquisition, waits for
    * the first image (so the window can be sized), creates the display and
    * schedules the per-camera-type timer task.
    *
    * @throws Exception if the camera delivers no image within the timeout
    */
   public void begin() throws Exception {
      if(running_) {
         return;
      }
      timer_ = new Timer("Live mode timer");
      core_.clearCircularBuffer();
      core_.startContinuousSequenceAcquisition(0);
      setType();
      long delay = getInterval();

      // Wait for first image to create ImageWindow, so that we can be sure about image size
      long start = System.currentTimeMillis();
      long now = start;
      // Bound the wait: 150 poll intervals, capped at 10 s.
      long timeout = Math.min(10000, delay * 150);
      while (core_.getRemainingImageCount() == 0 && (now - start < timeout) ) {
         now = System.currentTimeMillis();
         Thread.sleep(5);
      }
      if (now - start >= timeout) {
         throw new Exception("Camera did not send image within a reasonable time");
      }

      TaggedImage timg = core_.getLastTaggedImage();

      // With first image acquired, create the display
      gui_.checkSimpleAcquisition();
      win_ = MMStudioMainFrame.getSimpleDisplay();
      fpsCounter_ = 0;
      fpsTimer_ = System.currentTimeMillis();
      imageNumber_ = timg.tags.getLong("ImageNumber");
      oldImageNumber_ = imageNumber_;
      imageQueue_ = new LinkedBlockingQueue();
      timer_.schedule(task_, 0, delay);
      win_.liveModeEnabled(true);
      win_.getImagePlus().getWindow().toFront();
      running_ = true;
      gui_.runDisplayThread(imageQueue_, displayImageRoutine_);
   }

   /** Stops live mode (retries once after 1 s if the first attempt fails). */
   public void stop() {
      stop(true);
   }

   /**
    * Stops the timer, the sequence acquisition and the display thread.
    *
    * @param firstAttempt when true and stopping fails, schedule one retry
    */
   private void stop(boolean firstAttempt) {
      try {
         // Poison pill terminates the display thread's queue loop.
         if (imageQueue_ != null)
            imageQueue_.put(TaggedImageQueue.POISON);
      } catch (InterruptedException ex) {
         ReportingUtils.logError(ex);
      }
      if (timer_ != null) {
         timer_.cancel();
      }
      try {
         if (core_.isSequenceRunning())
            core_.stopSequenceAcquisition();
         if (win_ != null) {
            win_.liveModeEnabled(false);
         }
         running_ = false;
      } catch (Exception ex) {
         // NOTE(review): this try block is empty — shutter-closing code was
         // apparently removed but its error handler remains; confirm intent.
         try {
         } catch (Exception e) {
            ReportingUtils.showError("Error closing shutter");
         }
         ReportingUtils.showError(ex);
         //Wait 1 s and try to stop again
         if (firstAttempt) {
            final Timer delayStop = new Timer();
            delayStop.schedule( new TimerTask() {
               @Override
               public void run() {
                  stop(false);
               }},1000);
         }
      }
   }

   /**
    * Keep track of the last imagenumber, added by the circular buffer
    * that we have seen here
    *
    * @param imageNumber
    */
   private synchronized void setImageNumber(long imageNumber) {
      imageNumber_ = imageNumber;
   }

   /**
    * Updates the fps timer (how fast does the camera pump images into the
    * circular buffer) and display fps (how fast do we display the images)
    * It is called from tasks that are doing the actual image drawing
    *
    */
   public synchronized void updateFPS() {
      if (!running_)
         return;
      try {
         fpsCounter_++;
         long now = System.currentTimeMillis();
         long diff = now - fpsTimer_;
         if (diff > fpsInterval_) {
            double d = diff/ 1000.0;
            // display fps = images we showed; fps = images the camera produced
            // (delta of the circular buffer's ImageNumber tag).
            double fps = fpsCounter_ / d;
            double dfps = (imageNumber_ - oldImageNumber_) / d;
            win_.displayStatusLine("fps: " + format_.format(dfps) +
                    ", display fps: " + format_.format(fps));
            fpsCounter_ = 0;
            fpsTimer_ = now;
            oldImageNumber_ = imageNumber_;
         }
      } catch (Exception ex) {
         ReportingUtils.logError(ex);
      }
   }

   /**
    * Task executed to display live images when using a single camera
    *
    * @return
    */
   private TimerTask singleCameraLiveTask() {
      return new TimerTask() {
         @Override
         public void run() {
            if (core_.getRemainingImageCount() == 0) {
               return;
            }
            if (win_.windowClosed()) //check is user closed window
            {
               gui_.enableLiveMode(false);
            } else {
               try {
                  TaggedImage ti = core_.getLastTaggedImage();
                  // if we have already shown this image, do not do it again.
                  setImageNumber(ti.tags.getLong("ImageNumber"));
                  imageQueue_.put(ti);
               } catch (Exception ex) {
                  ReportingUtils.logMessage("Stopping live mode because of error...");
                  gui_.enableLiveMode(false);
                  ReportingUtils.showError(ex);
               }
            }
         }
      };
   }

   /**
    * Task executed to display live images when using a multi-channel camera:
    * scans the most recent 2*N buffer entries and queues one image per
    * distinct camera channel, stopping once all N channels are covered.
    */
   private TimerTask multiCamLiveTask() {
      return new TimerTask() {
         @Override
         public void run() {
            if (core_.getRemainingImageCount() == 0) {
               return;
            }
            if (win_.windowClosed() || !gui_.acquisitionExists(MMStudioMainFrame.SIMPLE_ACQ)) {
               gui_.enableLiveMode(false);  //disable live if user closed window
            } else {
               try {
                  String camera = core_.getCameraDevice();
                  Set<String> cameraChannelsAcquired = new HashSet<String>();
                  for (int i = 0; i < 2 * multiChannelCameraNrCh_; ++i) {
                     TaggedImage ti = core_.getNBeforeLastTaggedImage(i);
                     String channelName;
                     if (ti.tags.has(camera + "-CameraChannelName")) {
                        channelName = ti.tags.getString(camera + "-CameraChannelName");
                        if (!cameraChannelsAcquired.contains(channelName)) {
                           // Tag with channel info so the display routes it correctly.
                           ti.tags.put("Channel", channelName);
                           int ccIndex = ti.tags.getInt(camera + "-CameraChannelIndex");
                           ti.tags.put("ChannelIndex", ccIndex);
                           if (ccIndex == 0) {
                              setImageNumber(ti.tags.getLong("ImageNumber"));
                           }
                           imageQueue_.put(ti);
                           cameraChannelsAcquired.add(channelName);
                        }
                        if (cameraChannelsAcquired.size() == multiChannelCameraNrCh_) {
                           break;
                        }
                     }
                  }
               } catch (Exception exc) {
                  ReportingUtils.logMessage("Stopping live mode because of error...");
                  gui_.enableLiveMode(false);
                  ReportingUtils.showError(exc);
               }
            }
         }
      };
   }
}
package org.csstudio.sds.components.ui.internal.editparts;

import org.csstudio.sds.components.model.ActionButtonModel;
import org.csstudio.sds.components.model.LabelModel;
import org.csstudio.sds.components.model.MenuButtonModel;
import org.csstudio.sds.components.ui.internal.figures.RefreshableLabelFigure;
import org.csstudio.sds.components.ui.internal.utils.WidgetActionHandlerService;
import org.csstudio.sds.model.properties.actions.WidgetAction;
import org.csstudio.sds.ui.CheckedUiRunnable;
import org.csstudio.sds.ui.editparts.AbstractWidgetEditPart;
import org.csstudio.sds.ui.editparts.ExecutionMode;
import org.csstudio.sds.ui.editparts.IWidgetPropertyChangeHandler;
import org.csstudio.sds.util.CustomMediaFactory;
import org.eclipse.draw2d.IFigure;
import org.eclipse.draw2d.MouseEvent;
import org.eclipse.draw2d.MouseListener;
import org.eclipse.draw2d.geometry.Point;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.FontData;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Menu;
import org.eclipse.swt.widgets.MenuItem;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.PlatformUI;

/**
 * EditPart for a menu-button widget: renders a label figure and, in run mode,
 * pops up an SWT menu of the model's configured {@link WidgetAction}s when pressed.
 *
 * @author Helge Rickens, Kai Meyer
 */
public final class MenuButtonEditPart extends AbstractWidgetEditPart {
    /** The {@link Listener} attached to every {@link MenuItem}; dispatches the selected action. */
    private Listener _listener = new MenuActionListener();

    /**
     * {@inheritDoc}
     *
     * Creates the label figure and wires the mouse-press handler that opens the menu.
     */
    @Override
    protected IFigure doCreateFigure() {
        final MenuButtonModel model = (MenuButtonModel) getWidgetModel();
        RefreshableLabelFigure label = new RefreshableLabelFigure();
        label.setTextValue(model.getLabel());
        label.setFont(CustomMediaFactory.getInstance().getFont(model.getFont()));
        label.setTextAlignment(model.getTextAlignment());
        label.setTransparent(false);
        label.addMouseListener(new MouseListener() {
            public void mouseDoubleClicked(final MouseEvent me) {
                // Intentionally empty: double clicks are not handled.
            }

            public void mousePressed(final MouseEvent me) {
                // Menu is only shown in run mode, never while editing the display.
                if (getExecutionMode().equals(ExecutionMode.RUN_MODE)) {
                    final org.eclipse.swt.graphics.Point cursorLocation = Display.getCurrent().getCursorLocation();
                    // NOTE(review): the instance is created and immediately discarded;
                    // CheckedUiRunnable presumably runs doRunInUi() from its constructor
                    // on the UI thread — confirm against its implementation.
                    new CheckedUiRunnable() {
                        protected void doRunInUi() {
                            performDirectEdit(me.getLocation(), cursorLocation.x, cursorLocation.y);
                        }
                    };
                }
            }

            public void mouseReleased(final MouseEvent me) {
                // Intentionally empty: action fires on press.
            }
        });
        return label;
    }

    /**
     * Opens the pop-up menu with one item per configured widget action and blocks
     * (pumping the SWT event loop) until the menu is closed.
     *
     * @param point the location of the mouse-event within the figure
     * @param absolutX the x coordinate of the mouse in the display
     * @param absolutY the y coordinate of the mouse in the display
     */
    private void performDirectEdit(final Point point, final int absolutX, final int absolutY) {
        Shell shell = PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell();
        Menu menu = new Menu(shell, SWT.POP_UP);
        for (WidgetAction action : ((MenuButtonModel) this.getCastedModel()).getActionData().getWidgetActions()) {
            MenuItem item1 = new MenuItem(menu, SWT.PUSH);
            // The action is stashed on the item; MenuActionListener reads it back on selection.
            item1.setData(action);
            item1.setText(action.getActionLabel());
            item1.addListener(SWT.Selection, _listener);
        }
        // Translate the figure-relative click point to display coordinates so the menu
        // appears anchored below the widget.
        int x = absolutX;
        int y = absolutY;
        x = x - point.x + this.getCastedModel().getX();
        y = y - point.y + this.getCastedModel().getY() + this.getCastedModel().getHeight();
        menu.setLocation(x, y);
        menu.setVisible(true);
        // Keep dispatching UI events until the user dismisses the menu.
        while (!menu.isDisposed() && menu.isVisible()) {
            if (!Display.getCurrent().readAndDispatch()) {
                Display.getCurrent().sleep();
            }
        }
        menu.dispose();
    }

    /**
     * Returns the Figure of this EditPart, cast to its concrete type.
     *
     * @return the {@link RefreshableLabelFigure} of this EditPart
     */
    protected RefreshableLabelFigure getCastedFigure() {
        return (RefreshableLabelFigure) getFigure();
    }

    /**
     * {@inheritDoc}
     *
     * Registers handlers that push label, font and alignment model changes into the figure.
     */
    @Override
    protected void registerPropertyChangeHandlers() {
        // label
        IWidgetPropertyChangeHandler labelHandler = new IWidgetPropertyChangeHandler() {
            public boolean handleChange(final Object oldValue, final Object newValue, final IFigure refreshableFigure) {
                RefreshableLabelFigure figure = getCastedFigure();
                figure.setTextValue(newValue.toString());
                return true;
            }
        };
        setPropertyChangeHandler(ActionButtonModel.PROP_LABEL, labelHandler);
        // font
        IWidgetPropertyChangeHandler fontHandler = new IWidgetPropertyChangeHandler() {
            public boolean handleChange(final Object oldValue, final Object newValue, final IFigure refreshableFigure) {
                RefreshableLabelFigure figure = getCastedFigure();
                FontData fontData = (FontData) newValue;
                figure.setFont(CustomMediaFactory.getInstance().getFont(fontData.getName(), fontData.getHeight(), fontData.getStyle()));
                return true;
            }
        };
        setPropertyChangeHandler(LabelModel.PROP_FONT, fontHandler);
        // text alignment
        IWidgetPropertyChangeHandler alignmentHandler = new IWidgetPropertyChangeHandler() {
            public boolean handleChange(final Object oldValue, final Object newValue, final IFigure refreshableFigure) {
                RefreshableLabelFigure figure = getCastedFigure();
                figure.setTextAlignment((Integer) newValue);
                return true;
            }
        };
        setPropertyChangeHandler(ActionButtonModel.PROP_TEXT_ALIGNMENT, alignmentHandler);
    }

    /**
     * The {@link Listener} for the {@link MenuItem}s: retrieves the {@link WidgetAction}
     * stored on the selected item and hands it to the action handler service.
     *
     * @author Kai Meyer
     */
    private final class MenuActionListener implements Listener {
        /**
         * {@inheritDoc}
         */
        public void handleEvent(final Event event) {
            WidgetAction action = (WidgetAction) event.widget.getData();
            WidgetActionHandlerService.getInstance().performAction(getCastedModel().getProperty(MenuButtonModel.PROP_ACTIONDATA), action);
        }
    }
}
package org.jetel.ctl;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.math.MathContext;
import java.math.RoundingMode;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.TimeZone;

import junit.framework.AssertionFailedError;

import org.jetel.component.CTLRecordTransform;
import org.jetel.component.RecordTransform;
import org.jetel.data.DataField;
import org.jetel.data.DataRecord;
import org.jetel.data.DataRecordFactory;
import org.jetel.data.SetVal;
import org.jetel.data.lookup.LookupTable;
import org.jetel.data.lookup.LookupTableFactory;
import org.jetel.data.primitive.Decimal;
import org.jetel.data.sequence.Sequence;
import org.jetel.data.sequence.SequenceFactory;
import org.jetel.exception.ComponentNotReadyException;
import org.jetel.exception.ConfigurationStatus;
import org.jetel.exception.TransformException;
import org.jetel.graph.ContextProvider;
import org.jetel.graph.ContextProvider.Context;
import org.jetel.graph.TransformationGraph;
import org.jetel.metadata.DataFieldContainerType;
import org.jetel.metadata.DataFieldMetadata;
import org.jetel.metadata.DataFieldType;
import org.jetel.metadata.DataRecordMetadata;
import org.jetel.test.CloverTestCase;
import org.jetel.util.MiscUtils;
import org.jetel.util.bytes.PackedDecimal;
import org.jetel.util.crypto.Base64;
import org.jetel.util.crypto.Digest;
import org.jetel.util.crypto.Digest.DigestType;
import org.jetel.util.primitive.TypedProperties;
import org.jetel.util.string.StringUtils;
import org.joda.time.DateTime;
import org.joda.time.Years;

/**
 * Base class for CTL compiler tests: builds a default transformation graph with
 * test metadata, records, a sequence, a lookup table and a dictionary, compiles
 * CTL code loaded from classpath resources, and lets subclasses execute it.
 */
public abstract class CompilerTestCase extends CloverTestCase {

    // Names of input/output metadata registered on the default graph.
    protected static final String INPUT_1 = "firstInput";
    protected static final String INPUT_2 = "secondInput";
    protected static final String INPUT_3 = "thirdInput";
    protected static final String INPUT_4 = "multivalueInput";
    protected static final String OUTPUT_1 = "firstOutput";
    protected static final String OUTPUT_2 = "secondOutput";
    protected static final String OUTPUT_3 = "thirdOutput";
    protected static final String OUTPUT_4 = "fourthOutput";
    protected static final String OUTPUT_5 = "firstMultivalueOutput";
    protected static final String OUTPUT_6 = "secondMultivalueOutput";
    protected static final String OUTPUT_7 = "thirdMultivalueOutput";
    protected static final String LOOKUP = "lookupMetadata";

    // Field values used when populating the default test records.
    protected static final String NAME_VALUE = " HELLO ";
    protected static final Double AGE_VALUE = 20.25;
    protected static final String CITY_VALUE = "Chong'La";

    protected static final Date BORN_VALUE;
    protected static final Long BORN_MILLISEC_VALUE;
    static {
        // NOTE(review): month 12 overflows Calendar's 0-based months and rolls into
        // January of the next year — presumably intentional for these tests; confirm.
        Calendar c = Calendar.getInstance();
        c.set(2008, 12, 25, 13, 25, 55);
        c.set(Calendar.MILLISECOND, 333);
        BORN_VALUE = c.getTime();
        BORN_MILLISEC_VALUE = c.getTimeInMillis();
    }

    protected static final Integer VALUE_VALUE = Integer.MAX_VALUE - 10;
    protected static final Boolean FLAG_VALUE = true;
    protected static final byte[] BYTEARRAY_VALUE = "Abeceda zedla deda".getBytes();

    protected static final BigDecimal CURRENCY_VALUE = new BigDecimal("133.525");
    protected static final int DECIMAL_PRECISION = 7;
    protected static final int DECIMAL_SCALE = 3;
    protected static final int NORMALIZE_RETURN_OK = 0;

    public static final int DECIMAL_MAX_PRECISION = 32;
    public static final MathContext MAX_PRECISION = new MathContext(DECIMAL_MAX_PRECISION, RoundingMode.DOWN);

    /** Flag to trigger Java compilation (CTL is translated to Java instead of interpreted). */
    private boolean compileToJava;

    protected DataRecord[] inputRecords;
    protected DataRecord[] outputRecords;

    protected TransformationGraph graph;

    public CompilerTestCase(boolean compileToJava) {
        this.compileToJava = compileToJava;
    }

    /**
     * Method to execute tested CTL code in a way specific to testing scenario.
     *
     * Assumes that {@link #graph}, {@link #inputRecords} and {@link #outputRecords}
     * have already been set.
     *
     * @param compiler compiler holding the already-compiled CTL code
     */
    public abstract void executeCode(ITLCompiler compiler);

    /**
     * Method which provides access to specified global variable.
     *
     * @param varName global variable to be accessed
     * @return current value of the variable
     */
    protected abstract Object getVariable(String varName);

    /** Asserts that the named CTL global variable equals the expected value. */
    protected void check(String varName, Object expectedResult) {
        assertEquals(varName, expectedResult, getVariable(varName));
    }

    /** Asserts that two CTL global variables hold equal values. */
    protected void checkEquals(String varName1, String varName2) {
        assertEquals("Comparing " + varName1 + " and " + varName2 + " : ", getVariable(varName1), getVariable(varName2));
    }

    /** Asserts that the named CTL global variable is null. */
    protected void checkNull(String varName) {
        assertNull(getVariable(varName));
    }

    /** Asserts that the named CTL global variable is a byte array with the expected content. */
    private void checkArray(String varName, byte[] expected) {
        byte[] actual = (byte[]) getVariable(varName);
        assertTrue("Arrays do not match; expected: " + byteArrayAsString(expected) + " but was " + byteArrayAsString(actual), Arrays.equals(actual, expected));
    }

    /** Renders a byte array as "[b1, b2, ...]" for assertion messages. */
    private static String byteArrayAsString(byte[] array) {
        final StringBuilder sb = new StringBuilder("[");
        for (final byte b : array) {
            sb.append(b);
            sb.append(", ");
        }
        sb.delete(sb.length() - 2, sb.length());
        sb.append(']');
        return sb.toString();
    }

    @Override
    protected void setUp() {
        // set default locale to English to prevent various parsing errors
        Locale.setDefault(Locale.ENGLISH);
        initEngine();
    }

    @Override
    protected void tearDown() throws Exception {
        super.tearDown();
        // Release references so each test starts from a clean slate.
        inputRecords = null;
        outputRecords = null;
        graph = null;
    }

    protected TransformationGraph createEmptyGraph() {
        return new TransformationGraph();
    }

    /**
     * Builds the default graph used by most tests: registers all test metadata,
     * the default sequence, the default lookup table, graph properties and the
     * default dictionary.
     */
    protected TransformationGraph createDefaultGraph() {
        TransformationGraph g = createEmptyGraph();
        // set the context URL, so that imports can be used
        g.getRuntimeContext().setContextURL(CompilerTestCase.class.getResource("."));
        final HashMap<String, DataRecordMetadata> metadataMap = new HashMap<String, DataRecordMetadata>();
        metadataMap.put(INPUT_1, createDefaultMetadata(INPUT_1));
        metadataMap.put(INPUT_2, createDefaultMetadata(INPUT_2));
        metadataMap.put(INPUT_3, createDefaultMetadata(INPUT_3));
        metadataMap.put(INPUT_4, createDefaultMultivalueMetadata(INPUT_4));
        metadataMap.put(OUTPUT_1, createDefaultMetadata(OUTPUT_1));
        metadataMap.put(OUTPUT_2, createDefaultMetadata(OUTPUT_2));
        metadataMap.put(OUTPUT_3, createDefaultMetadata(OUTPUT_3));
        metadataMap.put(OUTPUT_4, createDefault1Metadata(OUTPUT_4));
        metadataMap.put(OUTPUT_5, createDefaultMultivalueMetadata(OUTPUT_5));
        metadataMap.put(OUTPUT_6, createDefaultMultivalueMetadata(OUTPUT_6));
        metadataMap.put(OUTPUT_7, createDefaultMultivalueMetadata(OUTPUT_7));
        metadataMap.put(LOOKUP, createDefaultMetadata(LOOKUP));
        g.addDataRecordMetadata(metadataMap);
        g.addSequence(createDefaultSequence(g, "TestSequence"));
        g.addLookupTable(createDefaultLookup(g, "TestLookup"));
        Properties properties = new Properties();
        properties.put("PROJECT", ".");
        properties.put("DATAIN_DIR", "${PROJECT}/data-in");
        properties.put("COUNT", "`1+2`");
        properties.put("NEWLINE", "\\n");
        g.setGraphProperties(properties);
        initDefaultDictionary(g);
        return g;
    }

    /**
     * Populates the graph dictionary with one typed entry per CTL data type:
     * null-valued entries (single-letter keys) plus pre-set values and lists.
     */
    private void initDefaultDictionary(TransformationGraph g) {
        try {
            g.getDictionary().init();
            // Null entries, one per dictionary type.
            g.getDictionary().setValue("s", "string", null);
            g.getDictionary().setValue("i", "integer", null);
            g.getDictionary().setValue("l", "long", null);
            g.getDictionary().setValue("d", "decimal", null);
            g.getDictionary().setValue("n", "number", null);
            g.getDictionary().setValue("a", "date", null);
            g.getDictionary().setValue("b", "boolean", null);
            g.getDictionary().setValue("y", "byte", null);
            // Entries whose key encodes the stored value.
            g.getDictionary().setValue("i211", "integer", new Integer(211));
            g.getDictionary().setValue("sVerdon", "string", "Verdon");
            g.getDictionary().setValue("l452", "long", new Long(452));
            g.getDictionary().setValue("d621", "decimal", new BigDecimal(621));
            g.getDictionary().setValue("n9342", "number", new Double(934.2));
            g.getDictionary().setValue("a1992", "date", new GregorianCalendar(1992, GregorianCalendar.AUGUST, 1).getTime());
            g.getDictionary().setValue("bTrue", "boolean", Boolean.TRUE);
            g.getDictionary().setValue("yFib", "byte", new byte[] { 1, 2, 3, 5, 8, 13, 21, 34, 55, 89 });
            // Typed lists (with embedded nulls) for container-type tests.
            g.getDictionary().setValue("stringList", "list", Arrays.asList("aa", "bb", null, "cc"));
            g.getDictionary().setContentType("stringList", "string");
            g.getDictionary().setValue("dateList", "list", Arrays.asList(new Date(12000), new Date(34000), null, new Date(56000)));
            g.getDictionary().setContentType("dateList", "date");
            g.getDictionary().setValue("byteList", "list", Arrays.asList(new byte[] { 0x12 }, new byte[] { 0x34, 0x56 }, null, new byte[] { 0x78 }));
            g.getDictionary().setContentType("byteList", "byte");
        } catch (ComponentNotReadyException e) {
            throw new RuntimeException("Error init default dictionary", e);
        }
    }

    /**
     * Creates and initializes the default primitive sequence registered on the
     * default graph.
     */
    protected Sequence createDefaultSequence(TransformationGraph graph, String name) {
        Sequence seq = SequenceFactory.createSequence(graph, "PRIMITIVE_SEQUENCE", new Object[] { "Sequence0", graph, name }, new Class[] { String.class, TransformationGraph.class, String.class });
        try {
            seq.checkConfig(new ConfigurationStatus());
            seq.init();
        } catch (ComponentNotReadyException e) {
            throw new RuntimeException(e);
        }
        return seq;
    }

    /**
     * Creates default lookup table of type SimpleLookupTable with 4 records using default metadata and a composite
     * lookup key Name+Value. Use field City for testing response.
     *
     * @param graph graph the lookup table is attached to
     * @param name name under which the lookup table is registered
     * @return initialized lookup table, already in pre-execute state
     */
    protected LookupTable createDefaultLookup(TransformationGraph graph, String name) {
        final TypedProperties props = new TypedProperties();
        props.setProperty("id", "LookupTable0");
        props.setProperty("type", "simpleLookup");
        props.setProperty("metadata", LOOKUP);
        // Composite key: records are looked up by Name and Value together.
        props.setProperty("key", "Name;Value");
        props.setProperty("name", name);
        props.setProperty("keyDuplicates", "true");

        /*
         * The test lookup table is populated from file TestLookup.dat. Alternatively uncomment the populating code
         * below, however this will most probably break down test_lookup() because free() will wipe away all data and
         * noone will restore them
         */
        URL dataFile = getClass().getSuperclass().getResource("TestLookup.dat");
        if (dataFile == null) {
            throw new RuntimeException("Unable to populate testing lookup table. File 'TestLookup.dat' not found by classloader");
        }
        props.setProperty("fileURL", dataFile.getFile());

        LookupTableFactory.init();
        LookupTable lkp = LookupTableFactory.createLookupTable(props);
        lkp.setGraph(graph);
        try {
            // Lifecycle order matters: validate, initialize, then enter pre-execute state.
            lkp.checkConfig(new ConfigurationStatus());
            lkp.init();
            lkp.preExecute();
        } catch (ComponentNotReadyException ex) {
            throw new RuntimeException(ex);
        }

        /*DataRecord lkpRecord = createEmptyRecord(createDefaultMetadata("lookupResponse"));

        lkpRecord.getField("Name").setValue("Alpha");
        lkpRecord.getField("Value").setValue(1);
        lkpRecord.getField("City").setValue("Andorra la Vella");
        lkp.put(lkpRecord);

        lkpRecord.getField("Name").setValue("Bravo");
        lkpRecord.getField("Value").setValue(2);
        lkpRecord.getField("City").setValue("Bruxelles");
        lkp.put(lkpRecord);

        // duplicate entry
        lkpRecord.getField("Name").setValue("Charlie");
        lkpRecord.getField("Value").setValue(3);
        lkpRecord.getField("City").setValue("Chamonix");
        lkp.put(lkpRecord);
        lkpRecord.getField("Name").setValue("Charlie");
        lkpRecord.getField("Value").setValue(3);
        lkpRecord.getField("City").setValue("Chomutov");
        lkp.put(lkpRecord);*/

        return lkp;
    }

    /**
     * Creates records with default
structure.
     *
     * @param name name for the record to use
     * @return metadata with default structure
     */
    protected DataRecordMetadata createDefaultMetadata(String name) {
        // Field order is significant: tests address fields by index as well as by name.
        DataRecordMetadata ret = new DataRecordMetadata(name);
        ret.addField(new DataFieldMetadata("Name", DataFieldType.STRING, "|"));
        ret.addField(new DataFieldMetadata("Age", DataFieldType.NUMBER, "|"));
        ret.addField(new DataFieldMetadata("City", DataFieldType.STRING, "|"));

        DataFieldMetadata dateField = new DataFieldMetadata("Born", DataFieldType.DATE, "|");
        dateField.setFormatStr("yyyy-MM-dd HH:mm:ss");
        ret.addField(dateField);

        ret.addField(new DataFieldMetadata("BornMillisec", DataFieldType.LONG, "|"));
        ret.addField(new DataFieldMetadata("Value", DataFieldType.INTEGER, "|"));
        ret.addField(new DataFieldMetadata("Flag", DataFieldType.BOOLEAN, "|"));
        ret.addField(new DataFieldMetadata("ByteArray", DataFieldType.BYTE, "|"));

        // Last field uses newline as delimiter and carries the test decimal precision/scale.
        DataFieldMetadata decimalField = new DataFieldMetadata("Currency", DataFieldType.DECIMAL, "\n");
        decimalField.setProperty(DataFieldMetadata.LENGTH_ATTR, String.valueOf(DECIMAL_PRECISION));
        decimalField.setProperty(DataFieldMetadata.SCALE_ATTR, String.valueOf(DECIMAL_SCALE));
        ret.addField(decimalField);

        return ret;
    }

    /**
     * Creates records with a reduced default structure (Field1/Age/City only).
     *
     * @param name name for the record to use
     * @return metadata with default structure
     */
    protected DataRecordMetadata createDefault1Metadata(String name) {
        DataRecordMetadata ret = new DataRecordMetadata(name);
        ret.addField(new DataFieldMetadata("Field1", DataFieldType.STRING, "|"));
        ret.addField(new DataFieldMetadata("Age", DataFieldType.NUMBER, "|"));
        ret.addField(new DataFieldMetadata("City", DataFieldType.STRING, "|"));
        return ret;
    }

    /**
     * Creates records with default structure
     * containing multivalue fields.
     *
     * @param name name for the record to use
     * @return metadata with default structure
     */
    protected DataRecordMetadata createDefaultMultivalueMetadata(String name) {
        // Mix of SINGLE, LIST and MAP container fields covering several data types.
        DataRecordMetadata ret = new DataRecordMetadata(name);

        DataFieldMetadata stringListField = new DataFieldMetadata("stringListField", DataFieldType.STRING, "|");
        stringListField.setContainerType(DataFieldContainerType.LIST);
        ret.addField(stringListField);

        DataFieldMetadata dateField = new DataFieldMetadata("dateField", DataFieldType.DATE, "|");
        ret.addField(dateField);

        DataFieldMetadata byteField = new DataFieldMetadata("byteField", DataFieldType.BYTE, "|");
        ret.addField(byteField);

        DataFieldMetadata dateListField = new DataFieldMetadata("dateListField", DataFieldType.DATE, "|");
        dateListField.setContainerType(DataFieldContainerType.LIST);
        ret.addField(dateListField);

        DataFieldMetadata byteListField = new DataFieldMetadata("byteListField", DataFieldType.BYTE, "|");
        byteListField.setContainerType(DataFieldContainerType.LIST);
        ret.addField(byteListField);

        DataFieldMetadata stringField = new DataFieldMetadata("stringField", DataFieldType.STRING, "|");
        ret.addField(stringField);

        DataFieldMetadata integerMapField = new DataFieldMetadata("integerMapField", DataFieldType.INTEGER, "|");
        integerMapField.setContainerType(DataFieldContainerType.MAP);
        ret.addField(integerMapField);

        DataFieldMetadata stringMapField = new DataFieldMetadata("stringMapField", DataFieldType.STRING, "|");
        stringMapField.setContainerType(DataFieldContainerType.MAP);
        ret.addField(stringMapField);

        DataFieldMetadata dateMapField = new DataFieldMetadata("dateMapField", DataFieldType.DATE, "|");
        dateMapField.setContainerType(DataFieldContainerType.MAP);
        ret.addField(dateMapField);

        DataFieldMetadata byteMapField = new DataFieldMetadata("byteMapField", DataFieldType.BYTE, "|");
        byteMapField.setContainerType(DataFieldContainerType.MAP);
        ret.addField(byteMapField);

        DataFieldMetadata integerListField = new DataFieldMetadata("integerListField", DataFieldType.INTEGER, "|");
        integerListField.setContainerType(DataFieldContainerType.LIST);
        ret.addField(integerListField);

        DataFieldMetadata decimalListField = new DataFieldMetadata("decimalListField", DataFieldType.DECIMAL, "|");
        decimalListField.setContainerType(DataFieldContainerType.LIST);
        ret.addField(decimalListField);

        DataFieldMetadata decimalMapField = new DataFieldMetadata("decimalMapField", DataFieldType.DECIMAL, "|");
        decimalMapField.setContainerType(DataFieldContainerType.MAP);
        ret.addField(decimalMapField);

        return ret;
    }

    /**
     * Allocates a record for the given multivalue metadata and fills every field
     * with a deterministic test value appropriate to its container and data type.
     */
    protected DataRecord createDefaultMultivalueRecord(DataRecordMetadata dataRecordMetadata) {
        final DataRecord ret = DataRecordFactory.newRecord(dataRecordMetadata);
        ret.init();

        for (int i = 0; i < ret.getNumFields(); i++) {
            DataField field = ret.getField(i);
            DataFieldMetadata fieldMetadata = field.getMetadata();

            switch (fieldMetadata.getContainerType()) {
            case SINGLE:
                switch (fieldMetadata.getDataType()) {
                case STRING:
                    field.setValue("John");
                    break;
                case DATE:
                    field.setValue(new Date(10000));
                    break;
                case BYTE:
                    field.setValue(new byte[] { 0x12, 0x34, 0x56, 0x78 });
                    break;
                default:
                    throw new UnsupportedOperationException("Not implemented.");
                }
                break;
            case LIST: {
                List<Object> value = new ArrayList<Object>();
                switch (fieldMetadata.getDataType()) {
                case STRING:
                    value.addAll(Arrays.asList("John", "Doe", "Jersey"));
                    break;
                case INTEGER:
                    value.addAll(Arrays.asList(123, 456, 789));
                    break;
                case DATE:
                    value.addAll(Arrays.asList(new Date(12000), new Date(34000)));
                    break;
                case BYTE:
                    value.addAll(Arrays.asList(new byte[] { 0x12, 0x34 }, new byte[] { 0x56, 0x78 }));
                    break;
                case DECIMAL:
                    value.addAll(Arrays.asList(12.34, 56.78));
                    break;
                default:
                    throw new UnsupportedOperationException("Not implemented.");
                }
                field.setValue(value);
            }
                break;
            case MAP: {
                Map<String, Object> value = new HashMap<String, Object>();
                switch (fieldMetadata.getDataType()) {
                case STRING:
                    value.put("firstName", "John");
                    value.put("lastName", "Doe");
                    value.put("address", "Jersey");
                    break;
                case INTEGER:
                    value.put("count", 123);
                    value.put("max", 456);
                    value.put("sum", 789);
                    break;
                case DATE:
                    value.put("before", new Date(12000));
                    value.put("after", new Date(34000));
                    break;
                case BYTE:
                    value.put("hash", new byte[] { 0x12, 0x34 });
                    value.put("checksum", new byte[] { 0x56, 0x78 });
                    break;
                case DECIMAL:
                    value.put("asset", 12.34);
                    value.put("liability", 56.78);
                    break;
                default:
                    throw new UnsupportedOperationException("Not implemented.");
                }
                field.setValue(value);
            }
                break;
            default:
                throw new IllegalArgumentException(fieldMetadata.getContainerType().toString());
            }
        }

        return ret;
    }

    /**
     * Allocates a record for the default (single-value) metadata and fills it with
     * the class's *_VALUE constants.
     */
    protected DataRecord createDefaultRecord(DataRecordMetadata dataRecordMetadata) {
        final DataRecord ret = DataRecordFactory.newRecord(dataRecordMetadata);
        ret.init();

        SetVal.setString(ret, "Name", NAME_VALUE);
        SetVal.setDouble(ret, "Age", AGE_VALUE);
        SetVal.setString(ret, "City", CITY_VALUE);
        SetVal.setDate(ret, "Born", BORN_VALUE);
        SetVal.setLong(ret, "BornMillisec", BORN_MILLISEC_VALUE);
        SetVal.setInt(ret, "Value", VALUE_VALUE);
        SetVal.setValue(ret, "Flag", FLAG_VALUE);
        SetVal.setValue(ret, "ByteArray", BYTEARRAY_VALUE);
        SetVal.setValue(ret, "Currency", CURRENCY_VALUE);

        return ret;
    }

    /**
     * Allocates new records with structure prescribed by metadata and sets all its fields to <code>null</code>
     *
     * @param metadata structure to use
     * @return empty record
     */
    protected DataRecord createEmptyRecord(DataRecordMetadata metadata) {
        DataRecord ret = DataRecordFactory.newRecord(metadata);
        ret.init();

        for (int i = 0; i < ret.getNumFields(); i++) {
            SetVal.setNull(ret, i);
        }

        return ret;
    }

    /**
     * Executes the code using the default graph and records.
*/ protected void doCompile(String expStr, String testIdentifier) { TransformationGraph graph = createDefaultGraph(); DataRecord[] inRecords = new DataRecord[] { createDefaultRecord(graph.getDataRecordMetadata(INPUT_1)), createDefaultRecord(graph.getDataRecordMetadata(INPUT_2)), createEmptyRecord(graph.getDataRecordMetadata(INPUT_3)), createDefaultMultivalueRecord(graph.getDataRecordMetadata(INPUT_4)) }; DataRecord[] outRecords = new DataRecord[] { createEmptyRecord(graph.getDataRecordMetadata(OUTPUT_1)), createEmptyRecord(graph.getDataRecordMetadata(OUTPUT_2)), createEmptyRecord(graph.getDataRecordMetadata(OUTPUT_3)), createEmptyRecord(graph.getDataRecordMetadata(OUTPUT_4)), createEmptyRecord(graph.getDataRecordMetadata(OUTPUT_5)), createEmptyRecord(graph.getDataRecordMetadata(OUTPUT_6)), createEmptyRecord(graph.getDataRecordMetadata(OUTPUT_7)) }; doCompile(expStr, testIdentifier, graph, inRecords, outRecords); } /** * This method should be used to execute a test with a custom graph and custom input and output records. * * To execute a test with the default graph, * use {@link #doCompile(String)} * or {@link #doCompile(String, String)} instead. 
* * @param expStr * @param testIdentifier * @param graph * @param inRecords * @param outRecords */ protected void doCompile(String expStr, String testIdentifier, TransformationGraph graph, DataRecord[] inRecords, DataRecord[] outRecords) { this.graph = graph; this.inputRecords = inRecords; this.outputRecords = outRecords; // prepend the compilation mode prefix if (compileToJava) { expStr = "//#CTL2:COMPILE\n" + expStr; } print_code(expStr); DataRecordMetadata[] inMetadata = new DataRecordMetadata[inRecords.length]; for (int i = 0; i < inRecords.length; i++) { inMetadata[i] = inRecords[i].getMetadata(); } DataRecordMetadata[] outMetadata = new DataRecordMetadata[outRecords.length]; for (int i = 0; i < outRecords.length; i++) { outMetadata[i] = outRecords[i].getMetadata(); } ITLCompiler compiler = TLCompilerFactory.createCompiler(graph, inMetadata, outMetadata, "UTF-8"); // try { // System.out.println(compiler.convertToJava(expStr, CTLRecordTransform.class, testIdentifier)); // } catch (ErrorMessageException e) { // System.out.println("Error parsing CTL code. Unable to output Java translation."); List<ErrorMessage> messages = compiler.compile(expStr, CTLRecordTransform.class, testIdentifier); printMessages(messages); if (compiler.errorCount() > 0) { throw new AssertionFailedError("Error in execution. 
Check standard output for details."); } // CLVFStart parseTree = compiler.getStart(); // parseTree.dump(""); executeCode(compiler); } protected void doCompileExpectError(String expStr, String testIdentifier, List<String> errCodes) { graph = createDefaultGraph(); DataRecordMetadata[] inMetadata = new DataRecordMetadata[] { graph.getDataRecordMetadata(INPUT_1), graph.getDataRecordMetadata(INPUT_2), graph.getDataRecordMetadata(INPUT_3) }; DataRecordMetadata[] outMetadata = new DataRecordMetadata[] { graph.getDataRecordMetadata(OUTPUT_1), graph.getDataRecordMetadata(OUTPUT_2), graph.getDataRecordMetadata(OUTPUT_3), graph.getDataRecordMetadata(OUTPUT_4) }; // prepend the compilation mode prefix if (compileToJava) { expStr = "//#CTL2:COMPILE\n" + expStr; } print_code(expStr); ITLCompiler compiler = TLCompilerFactory.createCompiler(graph, inMetadata, outMetadata, "UTF-8"); List<ErrorMessage> messages = compiler.compile(expStr, CTLRecordTransform.class, testIdentifier); printMessages(messages); if (compiler.errorCount() == 0) { throw new AssertionFailedError("No errors in parsing. 
Expected " + errCodes.size() + " errors."); } if (compiler.errorCount() != errCodes.size()) { throw new AssertionFailedError(compiler.errorCount() + " errors in code, but expected " + errCodes.size() + " errors."); } Iterator<String> it = errCodes.iterator(); for (ErrorMessage errorMessage : compiler.getDiagnosticMessages()) { String expectedError = it.next(); if (!expectedError.equals(errorMessage.getErrorMessage())) { throw new AssertionFailedError("Error : \'" + compiler.getDiagnosticMessages().get(0).getErrorMessage() + "\', but expected: \'" + expectedError + "\'"); } } // CLVFStart parseTree = compiler.getStart(); // parseTree.dump(""); // executeCode(compiler); } protected void doCompileExpectError(String testIdentifier, String errCode) { doCompileExpectErrors(testIdentifier, Arrays.asList(errCode)); } protected void doCompileExpectErrors(String testIdentifier, List<String> errCodes) { URL importLoc = CompilerTestCase.class.getResource(testIdentifier + ".ctl"); if (importLoc == null) { throw new RuntimeException("Test case '" + testIdentifier + ".ctl" + "' not found"); } final StringBuilder sourceCode = new StringBuilder(); String line = null; try { BufferedReader rd = new BufferedReader(new InputStreamReader(importLoc.openStream())); while ((line = rd.readLine()) != null) { sourceCode.append(line).append("\n"); } rd.close(); } catch (IOException e) { throw new RuntimeException("I/O error occured when reading source file", e); } doCompileExpectError(sourceCode.toString(), testIdentifier, errCodes); } /** * Method loads tested CTL code from a file with the name <code>testIdentifier.ctl</code> The CTL code files should * be stored in the same directory as this class. 
* * @param Test * identifier defining CTL file to load code from */ protected String loadSourceCode(String testIdentifier) { URL importLoc = CompilerTestCase.class.getResource(testIdentifier + ".ctl"); if (importLoc == null) { throw new RuntimeException("Test case '" + testIdentifier + ".ctl" + "' not found"); } final StringBuilder sourceCode = new StringBuilder(); String line = null; try { BufferedReader rd = new BufferedReader(new InputStreamReader(importLoc.openStream())); while ((line = rd.readLine()) != null) { sourceCode.append(line).append("\n"); } rd.close(); } catch (IOException e) { throw new RuntimeException("I/O error occured when reading source file", e); } return sourceCode.toString(); } /** * Method loads and compiles tested CTL code from a file with the name <code>testIdentifier.ctl</code> The CTL code files should * be stored in the same directory as this class. * * The default graph and records are used for the execution. * * @param Test * identifier defining CTL file to load code from */ protected void doCompile(String testIdentifier) { String sourceCode = loadSourceCode(testIdentifier); doCompile(sourceCode, testIdentifier); } protected void printMessages(List<ErrorMessage> diagnosticMessages) { for (ErrorMessage e : diagnosticMessages) { System.out.println(e); } } /** * Compares two records if they have the same number of fields and identical values in their fields. Does not * consider (or examine) metadata. 
 * @param lhs left-hand record, may be null
 * @param rhs right-hand record, may be null
 * @return true if records have the same number of fields and the same values in them
 */
protected static boolean recordEquals(DataRecord lhs, DataRecord rhs) {
	// identity check also covers the "both null" case
	if (lhs == rhs)
		return true;
	// past this point, exactly one null operand means "not equal"
	if (rhs == null)
		return false;
	if (lhs == null) {
		return false;
	}
	if (lhs.getNumFields() != rhs.getNumFields()) {
		return false;
	}
	for (int i = 0; i < lhs.getNumFields(); i++) {
		if (lhs.getField(i).isNull()) {
			// a null field is only equal to another null field
			if (!rhs.getField(i).isNull()) {
				return false;
			}
		} else if (!lhs.getField(i).equals(rhs.getField(i))) {
			return false;
		}
	}
	return true;
}

// Debugging aid: prints the given CTL source with a column ruler and 1-based line numbers.
public void print_code(String text) {
	String[] lines = text.split("\n");
	System.out.println("\t: 1 2 3 4 5 ");
	System.out.println("\t:12345678901234567890123456789012345678901234567890123456789");
	for (int i = 0; i < lines.length; i++) {
		System.out.println((i + 1) + "\t:" + lines[i]);
	}
}

// Verifies that unary operators are allowed on record fields of each numeric type.
@SuppressWarnings("unchecked")
public void test_operators_unary_record_allowed() {
	doCompile("test_operators_unary_record_allowed");
	check("value", Arrays.asList(14, 16, 16, 65, 63, 63));
	check("bornMillisec", Arrays.asList(14L, 16L, 16L, 65L, 63L, 63L));
	List<Double> actualAge = (List<Double>) getVariable("age");
	double[] expectedAge = {14.123, 16.123, 16.123, 65.789, 63.789, 63.789};
	for (int i = 0; i < actualAge.size(); i++) {
		// doubles are compared with an explicit tolerance, not exact equality
		assertEquals("age[" + i + "]", expectedAge[i], actualAge.get(i), 0.0001);
	}
	check("currency", Arrays.asList(
			new BigDecimal(BigInteger.valueOf(12500), 3),
			new BigDecimal(BigInteger.valueOf(14500), 3),
			new BigDecimal(BigInteger.valueOf(14500), 3),
			new BigDecimal(BigInteger.valueOf(65432), 3),
			new BigDecimal(BigInteger.valueOf(63432), 3),
			new BigDecimal(BigInteger.valueOf(63432), 3)
	));
}

// Checks the CTL compare() function on dynamically accessed fields.
@SuppressWarnings("unchecked")
public void test_dynamic_compare() {
	doCompile("test_dynamic_compare");

	String varName = "compare";
	List<Integer> compareResult = (List<Integer>) getVariable(varName);
	for (int i = 0; i < compareResult.size(); i++) {
		// results cycle through: greater (i%3==0), equal (i%3==1), less (i%3==2)
		if ((i % 3) == 0) {
			assertTrue(varName + "[" + i + "]", compareResult.get(i) > 0);
		} else if ((i % 3) == 1) {
			assertEquals(varName + "[" + i + "]", Integer.valueOf(0), compareResult.get(i));
		} else if ((i % 3) == 2) {
			assertTrue(varName + "[" + i + "]", compareResult.get(i) < 0);
		}
	}

	varName = "compareBooleans";
	compareResult = (List<Integer>) getVariable(varName);
	assertEquals(varName + "[0]", Integer.valueOf(0), compareResult.get(0));
	assertTrue(varName + "[1]", compareResult.get(1) > 0);
	assertTrue(varName + "[2]", compareResult.get(2) < 0);
	assertEquals(varName + "[3]", Integer.valueOf(0), compareResult.get(3));
}

// Shared body for the dynamic getValue/setValue loop tests below; the parameter
// selects which CTL file variant is compiled.
private void test_dynamic_get_set_loop(String testIdentifier) {
	doCompile(testIdentifier);

	check("recordLength", 9);

	check("value", Arrays.asList(654321, 777777, 654321, 654323, 123456, 112567, 112233));
	check("type", Arrays.asList("string", "number", "string", "date", "long", "integer", "boolean", "byte", "decimal"));
	// NOTE(review): expected string depends on the default locale/time zone ("CET") -- confirm on other platforms
	check("asString", Arrays.asList("1000", "1001.0", "1002", "Thu Jan 01 01:00:01 CET 1970", "1004", "1005", "true", null, "1008.000"));
	check("isNull", Arrays.asList(false, false, false, false, false, false, false, true, false));
	check("fieldName", Arrays.asList("Name", "Age", "City", "Born", "BornMillisec", "Value", "Flag", "ByteArray", "Currency"));
	Integer[] indices = new Integer[9];
	for (int i = 0; i < indices.length; i++) {
		indices[i] = i;
	}
	check("fieldIndex", Arrays.asList(indices));

	// check dynamic write and read with all data types
	check("booleanVar", true);
	assertTrue("byteVar", Arrays.equals(new BigInteger("1234567890abcdef", 16).toByteArray(), (byte[]) getVariable("byteVar")));
	check("decimalVar", new BigDecimal(BigInteger.valueOf(1000125), 3));
	check("integerVar", 1000);
	check("longVar", 1000000000000L);
	check("numberVar", 1000.5);
	check("stringVar", "hello");
	check("dateVar", new Date(5000));

	// null value
	Boolean[] someValue = new Boolean[graph.getDataRecordMetadata(INPUT_1).getNumFields()];
	Arrays.fill(someValue, Boolean.FALSE);
	check("someValue", Arrays.asList(someValue));

	Boolean[] nullValue = new Boolean[graph.getDataRecordMetadata(INPUT_1).getNumFields()];
	Arrays.fill(nullValue, Boolean.TRUE);
	check("nullValue", Arrays.asList(nullValue));

	// an array of String nulls -- nothing was written, so every element stays null
	String[] asString2 = new String[graph.getDataRecordMetadata(INPUT_1).getNumFields()];
	check("asString2", Arrays.asList(asString2));

	Boolean[] isNull2 = new Boolean[graph.getDataRecordMetadata(INPUT_1).getNumFields()];
	Arrays.fill(isNull2, Boolean.TRUE);
	check("isNull2", Arrays.asList(isNull2));
}

public void test_dynamic_get_set_loop() {
	test_dynamic_get_set_loop("test_dynamic_get_set_loop");
}

public void test_dynamic_get_set_loop_alternative() {
	test_dynamic_get_set_loop("test_dynamic_get_set_loop_alternative");
}

// Assigning to an input record must be rejected by the compiler.
public void test_dynamic_invalid() {
	doCompileExpectErrors("test_dynamic_invalid", Arrays.asList(
			"Input record cannot be assigned to",
			"Input record cannot be assigned to"
	));
}

public void test_return_constants() {
	// test case for issue 2257
	System.out.println("Return constants test:");
	doCompile("test_return_constants");

	check("skip", RecordTransform.SKIP);
	check("all", RecordTransform.ALL);
	check("ok", NORMALIZE_RETURN_OK);
	check("stop", RecordTransform.STOP);
}

public void test_raise_error_terminal() {
	// test case for issue 2337
	doCompile("test_raise_error_terminal");
}

public void test_raise_error_nonliteral() {
	// test case for issue CL-2071
	doCompile("test_raise_error_nonliteral");
}

public void test_case_unique_check() {
	// test case for issue 2515
	doCompileExpectErrors("test_case_unique_check", Arrays.asList("Duplicate case", "Duplicate case"));
}

public void test_case_unique_check2() {
	// test case for issue 2515
	doCompileExpectErrors("test_case_unique_check2", Arrays.asList("Duplicate case", "Duplicate case"));
}

public void test_case_unique_check3() {
	doCompileExpectError("test_case_unique_check3", "Default case is already defined");
}

public void test_rvalue_for_append() {
	// test case for issue 3956
	doCompile("test_rvalue_for_append");
	check("a", Arrays.asList("1",
			"2"));
	check("b", Arrays.asList("a", "b", "c"));
	check("c", Arrays.asList("1", "2", "a", "b", "c"));
}

public void test_rvalue_for_map_append() {
	// test case for issue 3960
	doCompile("test_rvalue_for_map_append");
	HashMap<Integer, String> map1instance = new HashMap<Integer, String>();
	map1instance.put(1, "a");
	map1instance.put(2, "b");
	HashMap<Integer, String> map2instance = new HashMap<Integer, String>();
	map2instance.put(3, "c");
	map2instance.put(4, "d");
	// map3 is expected to be the union of map1 and map2
	HashMap<Integer, String> map3instance = new HashMap<Integer, String>();
	map3instance.put(1, "a");
	map3instance.put(2, "b");
	map3instance.put(3, "c");
	map3instance.put(4, "d");
	check("map1", map1instance);
	check("map2", map2instance);
	check("map3", map3instance);
}

public void test_global_field_access() {
	// test case for issue 3957
	doCompileExpectError("test_global_field_access", "Unable to access record field in global scope");
}

public void test_global_scope() {
	// test case for issue 5006
	doCompile("test_global_scope");
	check("len", "Kokon".length());
}

//TODO Implement
/*public void test_new() {
	doCompile("test_new");
}*/

public void test_parser() {
	System.out.println("\nParser test:");
	doCompile("test_parser");
}

// Resolving of special characters in a resource URL used through an import statement.
public void test_ref_res_import() {
	System.out.println("\nSpecial character resolving (import) test:");

	URL importLoc = getClass().getSuperclass().getResource("test_ref_res.ctl");
	String expStr = "import '" + importLoc + "';\n";
	doCompile(expStr, "test_ref_res_import");
}

// Resolving of special characters when the code is compiled directly (no import).
public void test_ref_res_noimport() {
	System.out.println("\nSpecial character resolving (no import) test:");
	doCompile("test_ref_res");
}

// Two imported CTL files plus inline code sharing their global variables.
public void test_import() {
	System.out.println("\nImport test:");

	URL importLoc = getClass().getSuperclass().getResource("import.ctl");
	String expStr = "import '" + importLoc + "';\n";
	importLoc = getClass().getSuperclass().getResource("other.ctl");
	expStr += "import '" + importLoc + "';\n" +
			"integer sumInt;\n" +
			"function integer transform() {\n" +
			"	if (a == 3) {\n" +
			"		otherImportVar++;\n" +
			"	}\n" +
			"	sumInt = sum(a, otherImportVar);\n" +
			"	return 0;\n" +
			"}\n";
	doCompile(expStr, "test_import");
}

// Scoping rules for variables declared inside imported code.
public void test_scope() throws ComponentNotReadyException, TransformException {
	System.out.println("\nMapping test:");
	// NOTE: historical commented-out inline variants of this test were removed
	// for readability; the test now only compiles the imported sample code.

	URL importLoc = getClass().getSuperclass().getResource("samplecode.ctl");
	String expStr = "import '" + importLoc + "';\n";

	doCompile(expStr, "test_scope");
}

public void test_type_void() {
	doCompileExpectErrors("test_type_void", Arrays.asList("Syntax error on token 'void'",
			"Variable 'voidVar' is not declared",
			"Variable 'voidVar' is not declared",
			"Syntax error on token 'void'"));
}

public void test_type_integer() {
	doCompile("test_type_integer");
	check("i", 0);
	check("j", -1);
	check("field", VALUE_VALUE);
	checkNull("nullValue");
	check("varWithInitializer", 123);
	checkNull("varWithNullInitializer");
}

// Integer.MIN_VALUE / MAX_VALUE literals must survive a round trip through CTL.
public void test_type_integer_edge() {
	String testExpression =
		"integer minInt;\n"+
		"integer maxInt;\n"+
		"function integer transform() {\n" +
			"minInt=" + Integer.MIN_VALUE + ";\n" +
			"printErr(minInt, true);\n" +
			"maxInt=" + Integer.MAX_VALUE + ";\n" +
			"printErr(maxInt, true);\n" +
			"return 0;\n" +
		"}\n";
	doCompile(testExpression, "test_int_edge");
	check("minInt", Integer.MIN_VALUE);
	check("maxInt", Integer.MAX_VALUE);
}

public void test_type_long() {
	doCompile("test_type_long");
	check("i", Long.valueOf(0));
	check("j", Long.valueOf(-1));
	check("field", BORN_MILLISEC_VALUE);
	check("def", Long.valueOf(0));
	checkNull("nullValue");
	check("varWithInitializer", 123L);
	checkNull("varWithNullInitializer");
}

// Long.MIN_VALUE / MAX_VALUE literals (with the 'L' suffix) must survive a round trip.
public void test_type_long_edge() {
	String expStr =
		"long minLong;\n"+
		"long maxLong;\n"+
		"function integer transform() {\n" +
			"minLong=" + (Long.MIN_VALUE) + "L;\n" +
			"printErr(minLong);\n" +
			"maxLong=" + (Long.MAX_VALUE) + "L;\n" +
			"printErr(maxLong);\n" +
			"return 0;\n" +
		"}\n";
	doCompile(expStr,"test_long_edge");
	check("minLong", Long.MIN_VALUE);
	check("maxLong", Long.MAX_VALUE);
}

public void test_type_decimal() {
	doCompile("test_type_decimal");
	check("i", new BigDecimal(0, MAX_PRECISION));
	check("j", new BigDecimal(-1, MAX_PRECISION));
	check("field", CURRENCY_VALUE);
	check("def", new BigDecimal(0, MAX_PRECISION));
	checkNull("nullValue");
	check("varWithInitializer", new BigDecimal("123.35", MAX_PRECISION));
	checkNull("varWithNullInitializer");
	// no 'D' distincter: the literal goes through an inexact double first
	check("varWithInitializerNoDist", new BigDecimal(123.35, MAX_PRECISION));
}

// Extreme long and double values assigned to CTL decimals, with and without
// the 'D'/'L' literal distincter suffix.
public void test_type_decimal_edge() {
	String testExpression =
		"decimal minLong;\n"+
		"decimal maxLong;\n"+
		"decimal minLongNoDist;\n"+
		"decimal maxLongNoDist;\n"+
		"decimal minDouble;\n"+
		"decimal maxDouble;\n"+
		"decimal minDoubleNoDist;\n"+
		"decimal maxDoubleNoDist;\n"+
		"function integer transform() {\n" +
			"minLong=" + String.valueOf(Long.MIN_VALUE) + "d;\n" +
			"printErr(minLong);\n" +
			"maxLong=" + String.valueOf(Long.MAX_VALUE) + "d;\n" +
			"printErr(maxLong);\n" +
			"minLongNoDist=" + String.valueOf(Long.MIN_VALUE) + "L;\n" +
			"printErr(minLongNoDist);\n" +
			"maxLongNoDist=" + String.valueOf(Long.MAX_VALUE) + "L;\n" +
			"printErr(maxLongNoDist);\n" +
			// distincter will cause the double-string be parsed into exact representation within BigDecimal
			"minDouble=" + String.valueOf(Double.MIN_VALUE) + "D;\n" +
			"printErr(minDouble);\n" +
			"maxDouble=" + String.valueOf(Double.MAX_VALUE) + "D;\n" +
			"printErr(maxDouble);\n" +
			// no distincter will cause the double-string to be parsed into inexact representation within double
			// then to be assigned into BigDecimal (which will extract only MAX_PRECISION digits)
			"minDoubleNoDist=" + String.valueOf(Double.MIN_VALUE) + ";\n" +
			"printErr(minDoubleNoDist);\n" +
			"maxDoubleNoDist=" + String.valueOf(Double.MAX_VALUE) + ";\n" +
			"printErr(maxDoubleNoDist);\n" +
			"return 0;\n" +
		"}\n";
	doCompile(testExpression, "test_decimal_edge");
	check("minLong", new BigDecimal(String.valueOf(Long.MIN_VALUE), MAX_PRECISION));
	check("maxLong", new BigDecimal(String.valueOf(Long.MAX_VALUE), MAX_PRECISION));
	check("minLongNoDist", new BigDecimal(String.valueOf(Long.MIN_VALUE), MAX_PRECISION));
	check("maxLongNoDist", new BigDecimal(String.valueOf(Long.MAX_VALUE), MAX_PRECISION));
	// distincter will cause the MIN_VALUE to be parsed into exact representation (i.e. 4.9E-324)
	check("minDouble", new BigDecimal(String.valueOf(Double.MIN_VALUE), MAX_PRECISION));
	check("maxDouble", new BigDecimal(String.valueOf(Double.MAX_VALUE), MAX_PRECISION));
	// no distincter will cause MIN_VALUE to be parsed into double inexact representation and extraction of
	// MAX_PRECISION digits (i.e. 4.94065.....E-324)
	check("minDoubleNoDist", new BigDecimal(Double.MIN_VALUE, MAX_PRECISION));
	check("maxDoubleNoDist", new BigDecimal(Double.MAX_VALUE, MAX_PRECISION));
}

public void test_type_number() {
	doCompile("test_type_number");

	check("i", Double.valueOf(0));
	check("j", Double.valueOf(-1));
	check("field", AGE_VALUE);
	check("def", Double.valueOf(0));
	checkNull("nullValue");
	checkNull("varWithNullInitializer");
}

// Double.MIN_VALUE / MAX_VALUE literals must survive a round trip through CTL.
public void test_type_number_edge() {
	String testExpression =
		"number minDouble;\n" +
		"number maxDouble;\n"+
		"function integer transform() {\n" +
			"minDouble=" + Double.MIN_VALUE + ";\n" +
			"printErr(minDouble);\n" +
			"maxDouble=" + Double.MAX_VALUE + ";\n" +
			"printErr(maxDouble);\n" +
			"return 0;\n" +
		"}\n";
	doCompile(testExpression, "test_number_edge");
	check("minDouble", Double.valueOf(Double.MIN_VALUE));
	check("maxDouble", Double.valueOf(Double.MAX_VALUE));
}

// String literals: escape sequences, unicode escapes, and non-ASCII characters
// in both single- and double-quoted CTL strings.
public void test_type_string() {
	doCompile("test_type_string");
	check("i","0");
	check("helloEscaped", "hello\\nworld");
	check("helloExpanded", "hello\nworld");
	check("fieldName", NAME_VALUE);
	check("fieldCity", CITY_VALUE);
	check("escapeChars", "a\u0101\u0102A");
	check("doubleEscapeChars", "a\\u0101\\u0102A");
	check("specialChars", "špeciálne značky s mäkčeňom môžu byť");
	check("dQescapeChars", "a\u0101\u0102A");
	//TODO:Is next test correct?
	check("dQdoubleEscapeChars", "a\\u0101\\u0102A");
	check("dQspecialChars", "špeciálne značky s mäkčeňom môžu byť");
	check("empty", "");
	check("def", "");
	checkNull("varWithNullInitializer");
}

// A 1000-character string literal must pass through the parser unchanged.
public void test_type_string_long() {
	int length = 1000;
	StringBuilder tmp = new StringBuilder(length);
	for (int i = 0; i < length; i++) {
		tmp.append(i % 10);
	}
	String testExpression =
		"string longString;\n" +
		"function integer transform() {\n" +
			"longString=\"" + tmp + "\";\n" +
			"printErr(longString);\n" +
			"return 0;\n" +
		"}\n";
	doCompile(testExpression, "test_string_long");

	check("longString", String.valueOf(tmp));
}

public void test_type_date() throws Exception {
	doCompile("test_type_date");
	check("d3", new GregorianCalendar(2006, GregorianCalendar.AUGUST, 1).getTime());
	check("d2", new GregorianCalendar(2006, GregorianCalendar.AUGUST, 2, 15, 15, 3).getTime());
	check("d1", new GregorianCalendar(2006, GregorianCalendar.JANUARY, 1, 1, 2, 3).getTime());
	check("field", BORN_VALUE);
	checkNull("nullValue");
	check("minValue", new GregorianCalendar(1970, GregorianCalendar.JANUARY, 1, 1, 0, 0).getTime());
	checkNull("varWithNullInitializer");

	// test with a default time zone set on the GraphRuntimeContext
	Context context = null;
	try {
		// rebuild the fixture so the fresh graph (with its own time zone) is used
		tearDown();
		setUp();

		TransformationGraph graph = new TransformationGraph();
		graph.getRuntimeContext().setTimeZone("GMT+8");
		context = ContextProvider.registerGraph(graph);
		doCompile("test_type_date");
		Calendar calendar = new GregorianCalendar(2006, GregorianCalendar.AUGUST, 2, 15, 15, 3);
		calendar.setTimeZone(TimeZone.getTimeZone("GMT+8"));
		check("d2", calendar.getTime());
		calendar.set(2006, 0, 1, 1, 2, 3);
		check("d1", calendar.getTime());
	} finally {
		// always unregister, otherwise the graph context leaks into other tests
		ContextProvider.unregister(context);
	}
}

public void test_type_boolean() {
	doCompile("test_type_boolean");
	check("b1", true);
	check("b2", false);
	check("b3", false);
	checkNull("nullValue");
	checkNull("varWithNullInitializer");
}

// Relational operators other than ==/!= are undefined for booleans.
public void test_type_boolean_compare() {
	doCompileExpectErrors("test_type_boolean_compare", Arrays.asList(
			"Operator '>' is not defined for types 'boolean' and 'boolean'",
			"Operator '>=' is not defined for types 'boolean' and 'boolean'",
			"Operator '<' is not defined for types 'boolean' and 'boolean'",
			"Operator '<=' is not defined for types 'boolean' and 'boolean'",
			"Operator '<' is not defined for types 'boolean' and 'boolean'",
			"Operator '>' is not defined for types 'boolean' and 'boolean'",
			"Operator '>=' is not defined for types 'boolean' and 'boolean'",
			"Operator '<=' is not defined for types 'boolean' and 'boolean'"));
}

// List construction, element assignment, copy semantics and null elements
// for every CTL element type.
public void test_type_list() {
	doCompile("test_type_list");
	check("intList", Arrays.asList(1, 2, 3, 4, 5, 6));
	check("intList2", Arrays.asList(1, 2, 3));
	check("stringList", Arrays.asList(
			"first", "replaced", "third", "fourth", "fifth", "sixth", "extra"));
	check("stringListCopy", Arrays.asList(
			"first", "second", "third", "fourth", "fifth", "seventh"));
	check("stringListCopy2", Arrays.asList(
			"first", "replaced", "third", "fourth", "fifth", "sixth", "extra"));
	// the copy must be a distinct instance, not an alias
	assertTrue(getVariable("stringList") != getVariable("stringListCopy"));
	assertEquals(getVariable("stringList"), getVariable("stringListCopy2"));

	assertEquals(Arrays.asList(false, null, true), getVariable("booleanList"));
	assertDeepEquals(Arrays.asList(new byte[] {(byte) 0xAB}, null), getVariable("byteList"));
	assertDeepEquals(Arrays.asList(null, new byte[] {(byte) 0xCD}), getVariable("cbyteList"));
	assertEquals(Arrays.asList(new Date(12000), null, new Date(34000)), getVariable("dateList"));
	assertEquals(Arrays.asList(null, new BigDecimal(BigInteger.valueOf(1234), 2)), getVariable("decimalList"));
	assertEquals(Arrays.asList(12, null, 34), getVariable("intList3"));
	assertEquals(Arrays.asList(12l, null, 98l), getVariable("longList"));
	assertEquals(Arrays.asList(12.34, null, 56.78), getVariable("numberList"));
	assertEquals(Arrays.asList("aa", null, "bb"), getVariable("stringList2"));

	List<?> decimalList2 = (List<?>)
			getVariable("decimalList2");
	// every element must come back as a BigDecimal, regardless of the CTL literal form
	for (Object o: decimalList2) {
		assertTrue(o instanceof BigDecimal);
	}

	List<?> intList4 = (List<?>) getVariable("intList4");
	// three distinct values expected; a Set collapses any duplicates
	Set<Object> intList4Set = new HashSet<Object>(intList4);
	assertEquals(3, intList4Set.size());
}

public void test_type_list_field() {
	doCompile("test_type_list_field");
	check("copyByValueTest1", "2");
	check("copyByValueTest2", "test");
}

public void test_type_map_field() {
	doCompile("test_type_map_field");
	Integer copyByValueTest1 = (Integer) getVariable("copyByValueTest1");
	assertEquals(new Integer(2), copyByValueTest1);
	Integer copyByValueTest2 = (Integer) getVariable("copyByValueTest2");
	assertEquals(new Integer(100), copyByValueTest2);
}

/**
 * Recursively asserts that o2 is a deep COPY of o1: container and Date instances
 * must be distinct objects at every level.
 * The structure of the objects must be exactly the same!
 *
 * @param o1 the original object
 * @param o2 the object expected to be a deep copy of o1
 */
private static void assertDeepCopy(Object o1, Object o2) {
	if (o1 instanceof DataRecord) {
		assertFalse(o1 == o2);
		DataRecord r1 = (DataRecord) o1;
		DataRecord r2 = (DataRecord) o2;
		for (int i = 0; i < r1.getNumFields(); i++) {
			assertDeepCopy(r1.getField(i).getValue(), r2.getField(i).getValue());
		}
	} else if (o1 instanceof Map) {
		assertFalse(o1 == o2);
		Map<?, ?> m1 = (Map<?, ?>) o1;
		Map<?, ?> m2 = (Map<?, ?>) o2;
		for (Object key: m1.keySet()) {
			assertDeepCopy(m1.get(key), m2.get(key));
		}
	} else if (o1 instanceof List) {
		assertFalse(o1 == o2);
		List<?> l1 = (List<?>) o1;
		List<?> l2 = (List<?>) o2;
		for (int i = 0; i < l1.size(); i++) {
			assertDeepCopy(l1.get(i), l2.get(i));
		}
	} else if (o1 instanceof Date) {
		// Date is mutable, so a deep copy must not share the instance
		assertFalse(o1 == o2);
//	} else if (o1 instanceof byte[]) { // not required anymore
//		assertFalse(o1 == o2);
	}
}

/**
 * Recursively asserts that o1 and o2 are structurally equal, comparing
 * records field by field, containers element by element, byte arrays by
 * content, CharSequences as strings, and Decimal/BigDecimal interchangeably.
 * The structure of the objects must be exactly the same!
 *
 * @param o1 expected value
 * @param o2 actual value
 */
private static void assertDeepEquals(Object o1, Object o2) {
	if ((o1 == null) && (o2 == null)) {
		return;
	}
	// fails (and stops recursion) when exactly one of them is null
	assertTrue((o1 == null) == (o2 == null));
	if (o1 instanceof DataRecord) {
		DataRecord r1 = (DataRecord) o1;
		DataRecord r2 = (DataRecord) o2;
		assertEquals(r1.getNumFields(), r2.getNumFields());
		for (int i = 0; i < r1.getNumFields(); i++) {
			assertDeepEquals(r1.getField(i).getValue(), r2.getField(i).getValue());
		}
	} else if (o1 instanceof Map) {
		Map<?, ?> m1 = (Map<?, ?>) o1;
		Map<?, ?> m2 = (Map<?, ?>) o2;
		assertTrue(m1.keySet().equals(m2.keySet()));
		for (Object key: m1.keySet()) {
			assertDeepEquals(m1.get(key), m2.get(key));
		}
	} else if (o1 instanceof List) {
		List<?> l1 = (List<?>) o1;
		List<?> l2 = (List<?>) o2;
		assertEquals("size", l1.size(), l2.size());
		for (int i = 0; i < l1.size(); i++) {
			assertDeepEquals(l1.get(i), l2.get(i));
		}
	} else if (o1 instanceof byte[]) {
		byte[] b1 = (byte[]) o1;
		byte[] b2 = (byte[]) o2;
		if (b1 != b2) {
			// a single null operand fails here via assertEquals
			if (b1 == null || b2 == null) {
				assertEquals(b1, b2);
			}
			assertEquals("length", b1.length, b2.length);
			for (int i = 0; i < b1.length; i++) {
				assertEquals(String.format("[%d]", i), b1[i], b2[i]);
			}
		}
	} else if (o1 instanceof CharSequence) {
		// compares e.g. String vs. CloverString by content
		String s1 = ((CharSequence) o1).toString();
		String s2 = ((CharSequence) o2).toString();
		assertEquals(s1, s2);
	} else if ((o1 instanceof Decimal) || (o1 instanceof BigDecimal)) {
		BigDecimal d1 = o1 instanceof Decimal ? ((Decimal) o1).getBigDecimalOutput() : (BigDecimal) o1;
		BigDecimal d2 = o2 instanceof Decimal ? ((Decimal) o2).getBigDecimalOutput() : (BigDecimal) o2;
		assertEquals(d1, d2);
	} else {
		assertEquals(o1, o2);
	}
}

// Deep-copy semantics of variable declarations with initializers.
private void check_assignment_deepcopy_variable_declaration() {
	Date testVariableDeclarationDate1 = (Date) getVariable("testVariableDeclarationDate1");
	Date testVariableDeclarationDate2 = (Date) getVariable("testVariableDeclarationDate2");
	byte[] testVariableDeclarationByte1 = (byte[]) getVariable("testVariableDeclarationByte1");
	byte[] testVariableDeclarationByte2 = (byte[]) getVariable("testVariableDeclarationByte2");

	assertDeepEquals(testVariableDeclarationDate1, testVariableDeclarationDate2);
	assertDeepEquals(testVariableDeclarationByte1, testVariableDeclarationByte2);

	assertDeepCopy(testVariableDeclarationDate1, testVariableDeclarationDate2);
	assertDeepCopy(testVariableDeclarationByte1, testVariableDeclarationByte2);
}

// Deep-copy semantics when assigning through array-access expressions
// (list/map subscripts and function-call results).
@SuppressWarnings("unchecked")
private void check_assignment_deepcopy_array_access_expression() {
	{
		// JJTARRAYACCESSEXPRESSION - List
		List<String> stringListField1 = (List<String>) getVariable("stringListField1");
		DataRecord recordInList1 = (DataRecord) getVariable("recordInList1");
		List<DataRecord> recordList1 = (List<DataRecord>) getVariable("recordList1");
		List<DataRecord> recordList2 = (List<DataRecord>) getVariable("recordList2");

		assertDeepEquals(stringListField1, recordInList1.getField("stringListField").getValue());
		assertDeepEquals(recordInList1, recordList1.get(0));
		assertDeepEquals(recordList1, recordList2);

		assertDeepCopy(stringListField1, recordInList1.getField("stringListField").getValue());
		assertDeepCopy(recordInList1, recordList1.get(0));
		assertDeepCopy(recordList1, recordList2);
	}

	{
		// map of records
		Date testDate1 = (Date) getVariable("testDate1");
		Map<Integer, DataRecord> recordMap1 = (Map<Integer, DataRecord>) getVariable("recordMap1");
		DataRecord recordInMap1 = (DataRecord) getVariable("recordInMap1");
		DataRecord recordInMap2 = (DataRecord) getVariable("recordInMap2");
		Map<Integer, DataRecord> recordMap2 =
				(Map<Integer, DataRecord>) getVariable("recordMap2");

		assertDeepEquals(testDate1, recordInMap1.getField("dateField").getValue());
		assertDeepEquals(recordInMap1, recordMap1.get(0));
		assertDeepEquals(recordInMap2, recordMap1.get(0));
		assertDeepEquals(recordMap1, recordMap2);

		assertDeepCopy(testDate1, recordInMap1.getField("dateField").getValue());
		assertDeepCopy(recordInMap1, recordMap1.get(0));
		assertDeepCopy(recordInMap2, recordMap1.get(0));
		assertDeepCopy(recordMap1, recordMap2);
	}

	{
		// map of dates
		Map<Integer, Date> dateMap1 = (Map<Integer, Date>) getVariable("dateMap1");
		Date date1 = (Date) getVariable("date1");
		Date date2 = (Date) getVariable("date2");
		assertDeepCopy(date1, dateMap1.get(0));
		assertDeepCopy(date2, dateMap1.get(1));
	}

	{
		// map of byte arrays
		Map<Integer, byte[]> byteMap1 = (Map<Integer, byte[]>) getVariable("byteMap1");
		byte[] byte1 = (byte[]) getVariable("byte1");
		byte[] byte2 = (byte[]) getVariable("byte2");
		assertDeepCopy(byte1, byteMap1.get(0));
		assertDeepCopy(byte2, byteMap1.get(1));
	}

	{
		// JJTARRAYACCESSEXPRESSION - Function call
		List<String> testArrayAccessFunctionCallStringList = (List<String>) getVariable("testArrayAccessFunctionCallStringList");
		DataRecord testArrayAccessFunctionCall = (DataRecord) getVariable("testArrayAccessFunctionCall");
		Map<String, DataRecord> function_call_original_map = (Map<String, DataRecord>) getVariable("function_call_original_map");
		Map<String, DataRecord> function_call_copied_map = (Map<String, DataRecord>) getVariable("function_call_copied_map");
		List<DataRecord> function_call_original_list = (List<DataRecord>) getVariable("function_call_original_list");
		List<DataRecord> function_call_copied_list = (List<DataRecord>) getVariable("function_call_copied_list");

		assertDeepEquals(testArrayAccessFunctionCallStringList, testArrayAccessFunctionCall.getField("stringListField").getValue());
		// the copied containers gained one extra entry each
		assertEquals(1, function_call_original_map.size());
		assertEquals(2, function_call_copied_map.size());
		assertDeepEquals(Arrays.asList(null, testArrayAccessFunctionCall), function_call_original_list);
		assertDeepEquals(Arrays.asList(null, testArrayAccessFunctionCall, testArrayAccessFunctionCall), function_call_copied_list);
		assertDeepEquals(testArrayAccessFunctionCall, function_call_original_map.get("1"));
		assertDeepEquals(testArrayAccessFunctionCall, function_call_copied_map.get("1"));
		assertDeepEquals(testArrayAccessFunctionCall, function_call_copied_map.get("2"));
		assertDeepEquals(testArrayAccessFunctionCall, function_call_original_list.get(1));
		assertDeepEquals(testArrayAccessFunctionCall, function_call_copied_list.get(1));
		assertDeepEquals(testArrayAccessFunctionCall, function_call_copied_list.get(2));

		assertDeepCopy(testArrayAccessFunctionCall, function_call_original_map.get("1"));
		assertDeepCopy(testArrayAccessFunctionCall, function_call_copied_map.get("1"));
		assertDeepCopy(testArrayAccessFunctionCall, function_call_copied_map.get("2"));
		assertDeepCopy(testArrayAccessFunctionCall, function_call_original_list.get(1));
		assertDeepCopy(testArrayAccessFunctionCall, function_call_copied_list.get(1));
		assertDeepCopy(testArrayAccessFunctionCall, function_call_copied_list.get(2));
	}
}

// Deep-copy semantics when assigning through output-record field access;
// results are observed on outputRecords[4..6].
@SuppressWarnings("unchecked")
private void check_assignment_deepcopy_field_access_expression() {
	// field access
	Date testFieldAccessDate1 = (Date) getVariable("testFieldAccessDate1");
	String testFieldAccessString1 = (String) getVariable("testFieldAccessString1");
	List<Date> testFieldAccessDateList1 = (List<Date>) getVariable("testFieldAccessDateList1");
	List<String> testFieldAccessStringList1 = (List<String>) getVariable("testFieldAccessStringList1");
	Map<String, Date> testFieldAccessDateMap1 = (Map<String, Date>) getVariable("testFieldAccessDateMap1");
	Map<String, String> testFieldAccessStringMap1 = (Map<String, String>) getVariable("testFieldAccessStringMap1");
	DataRecord testFieldAccessRecord1 = (DataRecord) getVariable("testFieldAccessRecord1");
	DataRecord firstMultivalueOutput =
			outputRecords[4];
	DataRecord secondMultivalueOutput = outputRecords[5];
	DataRecord thirdMultivalueOutput = outputRecords[6];

	// scalar and single-element assignments into the first output record
	assertDeepEquals(testFieldAccessDate1, firstMultivalueOutput.getField("dateField").getValue());
	assertDeepEquals(testFieldAccessDate1, ((List<?>) firstMultivalueOutput.getField("dateListField").getValue()).get(0));
	assertDeepEquals(testFieldAccessString1, ((List<?>) firstMultivalueOutput.getField("stringListField").getValue()).get(0));
	assertDeepEquals(testFieldAccessDate1, ((Map<?, ?>) firstMultivalueOutput.getField("dateMapField").getValue()).get("first"));
	assertDeepEquals(testFieldAccessString1, ((Map<?, ?>) firstMultivalueOutput.getField("stringMapField").getValue()).get("first"));
	// whole-container assignments into the second output record
	assertDeepEquals(testFieldAccessDateList1, secondMultivalueOutput.getField("dateListField").getValue());
	assertDeepEquals(testFieldAccessStringList1, secondMultivalueOutput.getField("stringListField").getValue());
	assertDeepEquals(testFieldAccessDateMap1, secondMultivalueOutput.getField("dateMapField").getValue());
	assertDeepEquals(testFieldAccessStringMap1, secondMultivalueOutput.getField("stringMapField").getValue());
	assertDeepEquals(testFieldAccessRecord1, thirdMultivalueOutput);

	assertDeepCopy(testFieldAccessDate1, firstMultivalueOutput.getField("dateField").getValue());
	assertDeepCopy(testFieldAccessDate1, ((List<?>) firstMultivalueOutput.getField("dateListField").getValue()).get(0));
	assertDeepCopy(testFieldAccessString1, ((List<?>) firstMultivalueOutput.getField("stringListField").getValue()).get(0));
	assertDeepCopy(testFieldAccessDate1, ((Map<?, ?>) firstMultivalueOutput.getField("dateMapField").getValue()).get("first"));
	assertDeepCopy(testFieldAccessString1, ((Map<?, ?>) firstMultivalueOutput.getField("stringMapField").getValue()).get("first"));
	assertDeepCopy(testFieldAccessDateList1, secondMultivalueOutput.getField("dateListField").getValue());
	assertDeepCopy(testFieldAccessStringList1, secondMultivalueOutput.getField("stringListField").getValue());
	assertDeepCopy(testFieldAccessDateMap1, secondMultivalueOutput.getField("dateMapField").getValue());
	assertDeepCopy(testFieldAccessStringMap1, secondMultivalueOutput.getField("stringMapField").getValue());
	assertDeepCopy(testFieldAccessRecord1, thirdMultivalueOutput);
}

// Deep-copy semantics when assigning through member-access expressions
// (record.field, dictionary entries, lists and maps of records).
@SuppressWarnings("unchecked")
private void check_assignment_deepcopy_member_access_expression() {
	{
		// member access - record
		Date testMemberAccessDate1 = (Date) getVariable("testMemberAccessDate1");
		byte[] testMemberAccessByte1 = (byte[]) getVariable("testMemberAccessByte1");
		List<Date> testMemberAccessDateList1 = (List<Date>) getVariable("testMemberAccessDateList1");
		List<byte[]> testMemberAccessByteList1 = (List<byte[]>) getVariable("testMemberAccessByteList1");
		DataRecord testMemberAccessRecord1 = (DataRecord) getVariable("testMemberAccessRecord1");
		DataRecord testMemberAccessRecord2 = (DataRecord) getVariable("testMemberAccessRecord2");

		assertDeepEquals(testMemberAccessDate1, testMemberAccessRecord1.getField("dateField").getValue());
		assertDeepEquals(testMemberAccessByte1, testMemberAccessRecord1.getField("byteField").getValue());
		assertDeepEquals(testMemberAccessDate1, ((List<?>) testMemberAccessRecord1.getField("dateListField").getValue()).get(0));
		assertDeepEquals(testMemberAccessByte1, ((List<?>) testMemberAccessRecord1.getField("byteListField").getValue()).get(0));
		assertDeepEquals(testMemberAccessDateList1, testMemberAccessRecord2.getField("dateListField").getValue());
		assertDeepEquals(testMemberAccessByteList1, testMemberAccessRecord2.getField("byteListField").getValue());

		assertDeepCopy(testMemberAccessDate1, testMemberAccessRecord1.getField("dateField").getValue());
		assertDeepCopy(testMemberAccessByte1, testMemberAccessRecord1.getField("byteField").getValue());
		assertDeepCopy(testMemberAccessDate1, ((List<?>) testMemberAccessRecord1.getField("dateListField").getValue()).get(0));
		assertDeepCopy(testMemberAccessByte1,
((List<?>) testMemberAccessRecord1.getField("byteListField").getValue()).get(0)); assertDeepCopy(testMemberAccessDateList1, testMemberAccessRecord2.getField("dateListField").getValue()); assertDeepCopy(testMemberAccessByteList1, testMemberAccessRecord2.getField("byteListField").getValue()); } { // member access - record Date testMemberAccessDate1 = (Date) getVariable("testMemberAccessDate1"); byte[] testMemberAccessByte1 = (byte[]) getVariable("testMemberAccessByte1"); List<Date> testMemberAccessDateList1 = (List<Date>) getVariable("testMemberAccessDateList1"); List<byte[]> testMemberAccessByteList1 = (List<byte[]>) getVariable("testMemberAccessByteList1"); DataRecord testMemberAccessRecord1 = (DataRecord) getVariable("testMemberAccessRecord1"); DataRecord testMemberAccessRecord2 = (DataRecord) getVariable("testMemberAccessRecord2"); DataRecord testMemberAccessRecord3 = (DataRecord) getVariable("testMemberAccessRecord3"); assertDeepEquals(testMemberAccessDate1, testMemberAccessRecord1.getField("dateField").getValue()); assertDeepEquals(testMemberAccessByte1, testMemberAccessRecord1.getField("byteField").getValue()); assertDeepEquals(testMemberAccessDate1, ((List<?>) testMemberAccessRecord1.getField("dateListField").getValue()).get(0)); assertDeepEquals(testMemberAccessByte1, ((List<?>) testMemberAccessRecord1.getField("byteListField").getValue()).get(0)); assertDeepEquals(testMemberAccessDateList1, testMemberAccessRecord2.getField("dateListField").getValue()); assertDeepEquals(testMemberAccessByteList1, testMemberAccessRecord2.getField("byteListField").getValue()); assertDeepEquals(testMemberAccessRecord3, testMemberAccessRecord2); assertDeepCopy(testMemberAccessDate1, testMemberAccessRecord1.getField("dateField").getValue()); assertDeepCopy(testMemberAccessByte1, testMemberAccessRecord1.getField("byteField").getValue()); assertDeepCopy(testMemberAccessDate1, ((List<?>) testMemberAccessRecord1.getField("dateListField").getValue()).get(0)); 
assertDeepCopy(testMemberAccessByte1, ((List<?>) testMemberAccessRecord1.getField("byteListField").getValue()).get(0)); assertDeepCopy(testMemberAccessDateList1, testMemberAccessRecord2.getField("dateListField").getValue()); assertDeepCopy(testMemberAccessByteList1, testMemberAccessRecord2.getField("byteListField").getValue()); assertDeepCopy(testMemberAccessRecord3, testMemberAccessRecord2); // dictionary Date dictionaryDate = (Date) graph.getDictionary().getEntry("a").getValue(); byte[] dictionaryByte = (byte[]) graph.getDictionary().getEntry("y").getValue(); List<String> testMemberAccessStringList1 = (List<String>) getVariable("testMemberAccessStringList1"); List<Date> testMemberAccessDateList2 = (List<Date>) getVariable("testMemberAccessDateList2"); List<byte[]> testMemberAccessByteList2 = (List<byte[]>) getVariable("testMemberAccessByteList2"); List<String> dictionaryStringList = (List<String>) graph.getDictionary().getValue("stringList"); List<Date> dictionaryDateList = (List<Date>) graph.getDictionary().getValue("dateList"); List<byte[]> dictionaryByteList = (List<byte[]>) graph.getDictionary().getValue("byteList"); assertDeepEquals(dictionaryDate, testMemberAccessDate1); assertDeepEquals(dictionaryByte, testMemberAccessByte1); assertDeepEquals(dictionaryStringList, testMemberAccessStringList1); assertDeepEquals(dictionaryDateList, testMemberAccessDateList2); assertDeepEquals(dictionaryByteList, testMemberAccessByteList2); assertDeepCopy(dictionaryDate, testMemberAccessDate1); assertDeepCopy(dictionaryByte, testMemberAccessByte1); assertDeepCopy(dictionaryStringList, testMemberAccessStringList1); assertDeepCopy(dictionaryDateList, testMemberAccessDateList2); assertDeepCopy(dictionaryByteList, testMemberAccessByteList2); // member access - array of records List<DataRecord> testMemberAccessRecordList1 = (List<DataRecord>) getVariable("testMemberAccessRecordList1"); assertDeepEquals(testMemberAccessDate1, 
testMemberAccessRecordList1.get(0).getField("dateField").getValue());
	assertDeepEquals(testMemberAccessByte1, testMemberAccessRecordList1.get(0).getField("byteField").getValue());
	assertDeepEquals(testMemberAccessDate1, ((List<Date>) testMemberAccessRecordList1.get(0).getField("dateListField").getValue()).get(0));
	assertDeepEquals(testMemberAccessByte1, ((List<byte[]>) testMemberAccessRecordList1.get(0).getField("byteListField").getValue()).get(0));
	assertDeepEquals(testMemberAccessDateList1, testMemberAccessRecordList1.get(1).getField("dateListField").getValue());
	assertDeepEquals(testMemberAccessByteList1, testMemberAccessRecordList1.get(1).getField("byteListField").getValue());
	assertDeepEquals(testMemberAccessRecordList1.get(1), testMemberAccessRecordList1.get(2));
	// Same pairs again, this time checking copy semantics.
	assertDeepCopy(testMemberAccessDate1, testMemberAccessRecordList1.get(0).getField("dateField").getValue());
	assertDeepCopy(testMemberAccessByte1, testMemberAccessRecordList1.get(0).getField("byteField").getValue());
	assertDeepCopy(testMemberAccessDate1, ((List<Date>) testMemberAccessRecordList1.get(0).getField("dateListField").getValue()).get(0));
	assertDeepCopy(testMemberAccessByte1, ((List<byte[]>) testMemberAccessRecordList1.get(0).getField("byteListField").getValue()).get(0));
	assertDeepCopy(testMemberAccessDateList1, testMemberAccessRecordList1.get(1).getField("dateListField").getValue());
	assertDeepCopy(testMemberAccessByteList1, testMemberAccessRecordList1.get(1).getField("byteListField").getValue());
	assertDeepCopy(testMemberAccessRecordList1.get(1), testMemberAccessRecordList1.get(2));
	// member access - map of records
	Map<Integer, DataRecord> testMemberAccessRecordMap1 = (Map<Integer, DataRecord>) getVariable("testMemberAccessRecordMap1");
	assertDeepEquals(testMemberAccessDate1, testMemberAccessRecordMap1.get(0).getField("dateField").getValue());
	assertDeepEquals(testMemberAccessByte1, testMemberAccessRecordMap1.get(0).getField("byteField").getValue());
	assertDeepEquals(testMemberAccessDate1, ((List<Date>) testMemberAccessRecordMap1.get(0).getField("dateListField").getValue()).get(0));
	assertDeepEquals(testMemberAccessByte1, ((List<byte[]>) testMemberAccessRecordMap1.get(0).getField("byteListField").getValue()).get(0));
	assertDeepEquals(testMemberAccessDateList1, testMemberAccessRecordMap1.get(1).getField("dateListField").getValue());
	assertDeepEquals(testMemberAccessByteList1, testMemberAccessRecordMap1.get(1).getField("byteListField").getValue());
	assertDeepEquals(testMemberAccessRecordMap1.get(1), testMemberAccessRecordMap1.get(2));
	assertDeepCopy(testMemberAccessDate1, testMemberAccessRecordMap1.get(0).getField("dateField").getValue());
	assertDeepCopy(testMemberAccessByte1, testMemberAccessRecordMap1.get(0).getField("byteField").getValue());
	assertDeepCopy(testMemberAccessDate1, ((List<Date>) testMemberAccessRecordMap1.get(0).getField("dateListField").getValue()).get(0));
	assertDeepCopy(testMemberAccessByte1, ((List<byte[]>) testMemberAccessRecordMap1.get(0).getField("byteListField").getValue()).get(0));
	assertDeepCopy(testMemberAccessDateList1, testMemberAccessRecordMap1.get(1).getField("dateListField").getValue());
	assertDeepCopy(testMemberAccessByteList1, testMemberAccessRecordMap1.get(1).getField("byteListField").getValue());
	assertDeepCopy(testMemberAccessRecordMap1.get(1), testMemberAccessRecordMap1.get(2));
}
}

// Assignment of composite CTL values must perform a deep copy: modifying the source
// list after the assignment must not show up in the previously assigned copy.
@SuppressWarnings("unchecked")
public void test_assignment_deepcopy() {
	doCompile("test_assignment_deepcopy");
	List<DataRecord> secondRecordList = (List<DataRecord>) getVariable("secondRecordList");
	assertEquals("before", secondRecordList.get(0).getField("Name").getValue().toString());
	List<DataRecord> firstRecordList = (List<DataRecord>) getVariable("firstRecordList");
	assertEquals("after", firstRecordList.get(0).getField("Name").getValue().toString());
	// Per-expression-kind deep-copy checks, factored into helpers (defined elsewhere in this class).
	check_assignment_deepcopy_variable_declaration();
	check_assignment_deepcopy_array_access_expression();
check_assignment_deepcopy_field_access_expression();
	check_assignment_deepcopy_member_access_expression();
}

// Deep-copy semantics when the assignment target/source is a field access expression:
// output records must equal the assigned values but remain independent copies.
public void test_assignment_deepcopy_field_access_expression() {
	doCompile("test_assignment_deepcopy_field_access_expression");
	DataRecord testFieldAccessRecord1 = (DataRecord) getVariable("testFieldAccessRecord1");
	DataRecord firstMultivalueOutput = outputRecords[4];
	DataRecord secondMultivalueOutput = outputRecords[5];
	DataRecord thirdMultivalueOutput = outputRecords[6];
	DataRecord multivalueInput = inputRecords[3];
	assertDeepEquals(firstMultivalueOutput, testFieldAccessRecord1);
	assertDeepEquals(secondMultivalueOutput, multivalueInput);
	assertDeepEquals(thirdMultivalueOutput, secondMultivalueOutput);
	assertDeepCopy(firstMultivalueOutput, testFieldAccessRecord1);
	assertDeepCopy(secondMultivalueOutput, multivalueInput);
	assertDeepCopy(thirdMultivalueOutput, secondMultivalueOutput);
}

// Assigning into an element of a map returned by a function call: the copy may be
// extended without affecting the original map.
public void test_assignment_array_access_function_call() {
	doCompile("test_assignment_array_access_function_call");
	Map<String, String> originalMap = new HashMap<String, String>();
	originalMap.put("a", "b");
	Map<String, String> copiedMap = new HashMap<String, String>(originalMap);
	copiedMap.put("c", "d");
	check("originalMap", originalMap);
	check("copiedMap", copiedMap);
}

// Negative test: assigning through an array access on a non-composite function result
// must produce exactly these compiler error messages.
public void test_assignment_array_access_function_call_wrong_type() {
	doCompileExpectErrors("test_assignment_array_access_function_call_wrong_type", Arrays.asList(
			"Expression is not a composite type but is resolved to 'string'",
			"Type mismatch: cannot convert from 'integer' to 'string'",
			"Cannot convert from 'integer' to string"
	));
}

// Values returned from functions/expressions must be copies: mutating the returned
// value afterwards must not change the original variable, record, list, map or
// dictionary entry it came from.
@SuppressWarnings("unchecked")
public void test_assignment_returnvalue() {
	doCompile("test_assignment_returnvalue");
	{
		List<String> stringList1 = (List<String>) getVariable("stringList1");
		List<String> stringList2 = (List<String>) getVariable("stringList2");
		List<String> stringList3 = (List<String>) getVariable("stringList3");
		List<DataRecord> recordList1 = (List<DataRecord>) getVariable("recordList1");
		Map<Integer, DataRecord> recordMap1 = (Map<Integer, DataRecord>) getVariable("recordMap1");
		List<String> stringList4 = (List<String>) getVariable("stringList4");
		Map<String, Integer> integerMap1 = (Map<String, Integer>) getVariable("integerMap1");
		DataRecord record1 = (DataRecord) getVariable("record1");
		DataRecord record2 = (DataRecord) getVariable("record2");
		DataRecord firstMultivalueOutput = outputRecords[4];
		DataRecord secondMultivalueOutput = outputRecords[5];
		DataRecord thirdMultivalueOutput = outputRecords[6];
		// Date-related locals are only used by the commented-out dictionary assertions below.
		Date dictionaryDate1 = (Date) getVariable("dictionaryDate1");
		Date dictionaryDate = (Date) graph.getDictionary().getValue("a");
		Date zeroDate = new Date(0);
		List<String> testReturnValueDictionary2 = (List<String>) getVariable("testReturnValueDictionary2");
		List<String> dictionaryStringList = (List<String>) graph.getDictionary().getValue("stringList");
		List<String> testReturnValue10 = (List<String>) getVariable("testReturnValue10");
		DataRecord testReturnValue11 = (DataRecord) getVariable("testReturnValue11");
		List<String> testReturnValue12 = (List<String>) getVariable("testReturnValue12");
		List<String> testReturnValue13 = (List<String>) getVariable("testReturnValue13");
		Map<Integer, DataRecord> function_call_original_map = (Map<Integer, DataRecord>) getVariable("function_call_original_map");
		Map<Integer, DataRecord> function_call_copied_map = (Map<Integer, DataRecord>) getVariable("function_call_copied_map");
		DataRecord function_call_map_newrecord = (DataRecord) getVariable("function_call_map_newrecord");
		List<DataRecord> function_call_original_list = (List<DataRecord>) getVariable("function_call_original_list");
		List<DataRecord> function_call_copied_list = (List<DataRecord>) getVariable("function_call_copied_list");
		DataRecord function_call_list_newrecord = (DataRecord) getVariable("function_call_list_newrecord");
		// identifier
		assertFalse(stringList1.isEmpty());
		assertTrue(stringList2.isEmpty());
assertTrue(stringList3.isEmpty());
		// array access expression - list
		assertDeepEquals("unmodified", recordList1.get(0).getField("stringField").getValue());
		assertDeepEquals("modified", recordList1.get(1).getField("stringField").getValue());
		// array access expression - map
		assertDeepEquals("unmodified", recordMap1.get(0).getField("stringField").getValue());
		assertDeepEquals("modified", recordMap1.get(1).getField("stringField").getValue());
		// array access expression - function call
		assertDeepEquals(null, function_call_original_map.get(2));
		assertDeepEquals("unmodified", function_call_map_newrecord.getField("stringField"));
		assertDeepEquals("modified", function_call_copied_map.get(2).getField("stringField"));
		assertDeepEquals(Arrays.asList(null, function_call_list_newrecord), function_call_original_list);
		assertDeepEquals("unmodified", function_call_list_newrecord.getField("stringField"));
		assertDeepEquals("modified", function_call_copied_list.get(2).getField("stringField"));
		// field access expression
		assertFalse(stringList4.isEmpty());
		assertTrue(((List<?>) firstMultivalueOutput.getField("stringListField").getValue()).isEmpty());
		assertFalse(integerMap1.isEmpty());
		assertTrue(((Map<?, ?>) firstMultivalueOutput.getField("integerMapField").getValue()).isEmpty());
		assertDeepEquals("unmodified", record1.getField("stringField"));
		assertDeepEquals("modified", secondMultivalueOutput.getField("stringField").getValue());
		assertDeepEquals("unmodified", record2.getField("stringField"));
		assertDeepEquals("modified", thirdMultivalueOutput.getField("stringField").getValue());
		// member access expression - dictionary
		// There is no function that could modify a date
		// assertEquals(zeroDate, dictionaryDate);
		// assertFalse(zeroDate.equals(testReturnValueDictionary1));
		assertFalse(testReturnValueDictionary2.isEmpty());
		assertTrue(dictionaryStringList.isEmpty());
		// member access expression - record
		assertFalse(testReturnValue10.isEmpty());
		assertTrue(((List<?>) testReturnValue11.getField("stringListField").getValue()).isEmpty());
		// member access expression - list of records
		assertFalse(testReturnValue12.isEmpty());
		assertTrue(((List<?>) recordList1.get(2).getField("stringListField").getValue()).isEmpty());
		// member access expression - map of records
		assertFalse(testReturnValue13.isEmpty());
		assertTrue(((List<?>) recordMap1.get(2).getField("stringListField").getValue()).isEmpty());
	}
}

// CTL 'map' type: literal initialization, date-keyed maps, copies being independent
// of the original, and iteration order preservation.
@SuppressWarnings("unchecked")
public void test_type_map() {
	doCompile("test_type_map");
	Map<String, Integer> testMap = (Map<String, Integer>) getVariable("testMap");
	assertEquals(Integer.valueOf(1), testMap.get("zero"));
	assertEquals(Integer.valueOf(2), testMap.get("one"));
	assertEquals(Integer.valueOf(3), testMap.get("two"));
	assertEquals(Integer.valueOf(4), testMap.get("three"));
	assertEquals(4, testMap.size());
	Map<Date, String> dayInWeek = (Map<Date, String>) getVariable("dayInWeek");
	// Build the exact Date keys used by the transform (midnight, milliseconds zeroed).
	Calendar c = Calendar.getInstance();
	c.set(2009, Calendar.MARCH, 2, 0, 0, 0);
	c.set(Calendar.MILLISECOND, 0);
	assertEquals("Monday", dayInWeek.get(c.getTime()));
	Map<Date, String> dayInWeekCopy = (Map<Date, String>) getVariable("dayInWeekCopy");
	c.set(2009, Calendar.MARCH, 3, 0, 0, 0);
	c.set(Calendar.MILLISECOND, 0);
	assertEquals("Tuesday", ((Map<Date, String>) getVariable("tuesday")).get(c.getTime()));
	assertEquals("Tuesday", dayInWeekCopy.get(c.getTime()));
	c.set(2009, Calendar.MARCH, 4, 0, 0, 0);
	c.set(Calendar.MILLISECOND, 0);
	assertEquals("Wednesday", ((Map<Date, String>) getVariable("wednesday")).get(c.getTime()));
	assertEquals("Wednesday", dayInWeekCopy.get(c.getTime()));
	// The copy was modified after copying, so it must differ from the original.
	assertFalse(dayInWeek.equals(dayInWeekCopy));
	{
		// Insertion order of 100 entries must be preserved on iteration.
		Map<?, ?> preservedOrder = (Map<?, ?>) getVariable("preservedOrder");
		assertEquals(100, preservedOrder.size());
		int i = 0;
		for (Map.Entry<?, ?> entry: preservedOrder.entrySet()) {
			assertEquals("key" + i, entry.getKey());
			assertEquals("value" + i, entry.getValue());
			i++;
		}
	}
}

public void test_type_record_list() {
doCompile("test_type_record_list");
	check("resultInt", 6);
	check("resultString", "string");
	check("resultInt2", 10);
	check("resultString2", "string2");
}

// Same expectations as test_type_record_list, but with the list declared globally.
public void test_type_record_list_global() {
	doCompile("test_type_record_list_global");
	check("resultInt", 6);
	check("resultString", "string");
	check("resultInt2", 10);
	check("resultString2", "string2");
}

public void test_type_record_map() {
	doCompile("test_type_record_map");
	check("resultInt", 6);
	check("resultString", "string");
	check("resultInt2", 10);
	check("resultString2", "string2");
}

public void test_type_record_map_global() {
	doCompile("test_type_record_map_global");
	check("resultInt", 6);
	check("resultString", "string");
	check("resultInt2", 10);
	check("resultString2", "string2");
}

// CTL 'record' type: copy-by-value semantics - copies may be modified independently
// and records are never shared by reference.
public void test_type_record() {
	doCompile("test_type_record");
	// expected result
	DataRecord expected = createDefaultRecord(createDefaultMetadata("expected"));
	// simple copy
	assertTrue(recordEquals(expected, inputRecords[0]));
	assertTrue(recordEquals(expected, (DataRecord) getVariable("copy")));
	// copy and modify
	expected.getField("Name").setValue("empty");
	expected.getField("Value").setValue(321);
	Calendar c = Calendar.getInstance();
	c.set(1987, Calendar.NOVEMBER, 13, 0, 0, 0);
	c.set(Calendar.MILLISECOND, 0);
	expected.getField("Born").setValue(c.getTime());
	assertTrue(recordEquals(expected, (DataRecord) getVariable("modified")));
	// 2x modified copy
	expected.getField("Name").setValue("not empty");
	assertTrue(recordEquals(expected, (DataRecord)getVariable("modified2")));
	// no modification by reference is possible
	assertTrue(recordEquals(expected, (DataRecord)getVariable("modified3")));
	expected.getField("Value").setValue(654321);
	assertTrue(recordEquals(expected, (DataRecord)getVariable("reference")));
	assertTrue(getVariable("modified3") != getVariable("reference"));
	// output record
	assertTrue(recordEquals(expected, outputRecords[1]));
	// null record
	expected.setToNull();
	assertTrue(recordEquals(expected, (DataRecord)getVariable("nullRecord")));
}

public void test_variables() {
	doCompile("test_variables");
	check("b1", true);
	check("b2", true);
	check("b4", "hi");
	check("i", 2);
}

// '+' operator for every CTL numeric type combination plus string concatenation;
// expected values mirror the corresponding Java arithmetic.
public void test_operator_plus() {
	doCompile("test_operator_plus");
	check("iplusj", 10 + 100);
	check("lplusm", Long.valueOf(Integer.MAX_VALUE) + Long.valueOf(Integer.MAX_VALUE / 10));
	check("mplusl", getVariable("lplusm"));
	check("mplusi", Long.valueOf(Integer.MAX_VALUE) + 10);
	check("iplusm", getVariable("mplusi"));
	check("nplusm1", Double.valueOf(0.1D + 0.001D));
	check("nplusj", Double.valueOf(100 + 0.1D));
	check("jplusn", getVariable("nplusj"));
	check("m1plusm", Double.valueOf(Long.valueOf(Integer.MAX_VALUE) + 0.001d));
	check("mplusm1", getVariable("m1plusm"));
	// decimal arithmetic uses BigDecimal with the class-level MAX_PRECISION context
	check("dplusd1", new BigDecimal("0.1", MAX_PRECISION).add(new BigDecimal("0.0001", MAX_PRECISION), MAX_PRECISION));
	check("dplusj", new BigDecimal(100, MAX_PRECISION).add(new BigDecimal("0.1", MAX_PRECISION), MAX_PRECISION));
	check("jplusd", getVariable("dplusj"));
	check("dplusm", new BigDecimal(Long.valueOf(Integer.MAX_VALUE), MAX_PRECISION).add(new BigDecimal("0.1", MAX_PRECISION), MAX_PRECISION));
	check("mplusd", getVariable("dplusm"));
	check("dplusn", new BigDecimal("0.1").add(new BigDecimal(0.1D, MAX_PRECISION)));
	check("nplusd", getVariable("dplusn"));
	// string concatenation with each numeric type
	check("spluss1", "hello world");
	check("splusj", "hello100");
	check("jpluss", "100hello");
	check("splusm", "hello" + Long.valueOf(Integer.MAX_VALUE));
	check("mpluss", Long.valueOf(Integer.MAX_VALUE) + "hello");
	check("splusm1", "hello" + Double.valueOf(0.001D));
	check("m1pluss", Double.valueOf(0.001D) + "hello");
	check("splusd1", "hello" + new BigDecimal("0.0001"));
	check("d1pluss", new BigDecimal("0.0001", MAX_PRECISION) + "hello");
}

// '-' operator for every CTL numeric type combination.
public void test_operator_minus() {
	doCompile("test_operator_minus");
	check("iminusj", 10 - 100);
	check("lminusm", Long.valueOf(Integer.MAX_VALUE / 10) - Long.valueOf(Integer.MAX_VALUE));
	check("mminusi", Long.valueOf(Integer.MAX_VALUE - 10));
check("iminusm", 10 - Long.valueOf(Integer.MAX_VALUE));
	check("nminusm1", Double.valueOf(0.1D - 0.001D));
	check("nminusj", Double.valueOf(0.1D - 100));
	check("jminusn", Double.valueOf(100 - 0.1D));
	check("m1minusm", Double.valueOf(0.001D - Long.valueOf(Integer.MAX_VALUE)));
	check("mminusm1", Double.valueOf(Long.valueOf(Integer.MAX_VALUE) - 0.001D));
	// decimal subtraction under the MAX_PRECISION math context
	check("dminusd1", new BigDecimal("0.1", MAX_PRECISION).subtract(new BigDecimal("0.0001", MAX_PRECISION), MAX_PRECISION));
	check("dminusj", new BigDecimal("0.1", MAX_PRECISION).subtract(new BigDecimal(100, MAX_PRECISION), MAX_PRECISION));
	check("jminusd", new BigDecimal(100, MAX_PRECISION).subtract(new BigDecimal("0.1", MAX_PRECISION), MAX_PRECISION));
	check("dminusm", new BigDecimal("0.1", MAX_PRECISION).subtract(new BigDecimal(Long.valueOf(Integer.MAX_VALUE), MAX_PRECISION), MAX_PRECISION));
	check("mminusd", new BigDecimal(Long.valueOf(Integer.MAX_VALUE), MAX_PRECISION).subtract(new BigDecimal("0.1", MAX_PRECISION), MAX_PRECISION));
	check("dminusn", new BigDecimal("0.1", MAX_PRECISION).subtract(new BigDecimal(0.1D, MAX_PRECISION), MAX_PRECISION));
	check("nminusd", new BigDecimal(0.1D, MAX_PRECISION).subtract(new BigDecimal("0.1", MAX_PRECISION), MAX_PRECISION));
}

// '*' operator for every CTL numeric type combination.
public void test_operator_multiply() {
	doCompile("test_operator_multiply");
	check("itimesj", 10 * 100);
	check("ltimesm", Long.valueOf(Integer.MAX_VALUE) * (Long.valueOf(Integer.MAX_VALUE / 10)));
	check("mtimesl", getVariable("ltimesm"));
	check("mtimesi", Long.valueOf(Integer.MAX_VALUE) * 10);
	check("itimesm", getVariable("mtimesi"));
	check("ntimesm1", Double.valueOf(0.1D * 0.001D));
	check("ntimesj", Double.valueOf(0.1) * 100);
	check("jtimesn", getVariable("ntimesj"));
	check("m1timesm", Double.valueOf(0.001d * Long.valueOf(Integer.MAX_VALUE)));
	check("mtimesm1", getVariable("m1timesm"));
	check("dtimesd1", new BigDecimal("0.1", MAX_PRECISION).multiply(new BigDecimal("0.0001", MAX_PRECISION), MAX_PRECISION));
	check("dtimesj", new BigDecimal("0.1", MAX_PRECISION).multiply(new BigDecimal(100, MAX_PRECISION)));
	check("jtimesd", getVariable("dtimesj"));
	check("dtimesm", new BigDecimal("0.1", MAX_PRECISION).multiply(new BigDecimal(Long.valueOf(Integer.MAX_VALUE), MAX_PRECISION), MAX_PRECISION));
	check("mtimesd", getVariable("dtimesm"));
	check("dtimesn", new BigDecimal("0.1", MAX_PRECISION).multiply(new BigDecimal(0.1, MAX_PRECISION), MAX_PRECISION));
	check("ntimesd", getVariable("dtimesn"));
}

// '/' operator; note the integer cases ("idividej", "idividem") intentionally use
// Java integer division semantics.
public void test_operator_divide() {
	doCompile("test_operator_divide");
	check("idividej", 10 / 100);
	check("ldividem", Long.valueOf(Integer.MAX_VALUE / 10) / Long.valueOf(Integer.MAX_VALUE));
	check("mdividei", Long.valueOf(Integer.MAX_VALUE / 10));
	check("idividem", 10 / Long.valueOf(Integer.MAX_VALUE));
	check("ndividem1", Double.valueOf(0.1D / 0.001D));
	check("ndividej", Double.valueOf(0.1D / 100));
	check("jdividen", Double.valueOf(100 / 0.1D));
	check("m1dividem", Double.valueOf(0.001D / Long.valueOf(Integer.MAX_VALUE)));
	check("mdividem1", Double.valueOf(Long.valueOf(Integer.MAX_VALUE) / 0.001D));
	check("ddivided1", new BigDecimal("0.1", MAX_PRECISION).divide(new BigDecimal("0.0001", MAX_PRECISION), MAX_PRECISION));
	check("ddividej", new BigDecimal("0.1", MAX_PRECISION).divide(new BigDecimal(100, MAX_PRECISION), MAX_PRECISION));
	check("jdivided", new BigDecimal(100, MAX_PRECISION).divide(new BigDecimal("0.1", MAX_PRECISION), MAX_PRECISION));
	check("ddividem", new BigDecimal("0.1", MAX_PRECISION).divide(new BigDecimal(Long.valueOf(Integer.MAX_VALUE), MAX_PRECISION), MAX_PRECISION));
	check("mdivided", new BigDecimal(Long.valueOf(Integer.MAX_VALUE), MAX_PRECISION).divide(new BigDecimal("0.1", MAX_PRECISION), MAX_PRECISION));
	check("ddividen", new BigDecimal("0.1", MAX_PRECISION).divide(new BigDecimal(0.1D, MAX_PRECISION), MAX_PRECISION));
	check("ndivided", new BigDecimal(0.1D, MAX_PRECISION).divide(new BigDecimal("0.1", MAX_PRECISION), MAX_PRECISION));
}

// '%' operator for every CTL numeric type combination.
public void test_operator_modulus() {
doCompile("test_operator_modulus");
	check("imoduloj", 10 % 100);
	check("lmodulom", Long.valueOf(Integer.MAX_VALUE / 10) % Long.valueOf(Integer.MAX_VALUE));
	check("mmoduloi", Long.valueOf(Integer.MAX_VALUE % 10));
	check("imodulom", 10 % Long.valueOf(Integer.MAX_VALUE));
	check("nmodulom1", Double.valueOf(0.1D % 0.001D));
	check("nmoduloj", Double.valueOf(0.1D % 100));
	check("jmodulon", Double.valueOf(100 % 0.1D));
	check("m1modulom", Double.valueOf(0.001D % Long.valueOf(Integer.MAX_VALUE)));
	check("mmodulom1", Double.valueOf(Long.valueOf(Integer.MAX_VALUE) % 0.001D));
	// decimal modulus maps to BigDecimal.remainder under MAX_PRECISION
	check("dmodulod1", new BigDecimal("0.1", MAX_PRECISION).remainder(new BigDecimal("0.0001", MAX_PRECISION), MAX_PRECISION));
	check("dmoduloj", new BigDecimal("0.1", MAX_PRECISION).remainder(new BigDecimal(100, MAX_PRECISION), MAX_PRECISION));
	check("jmodulod", new BigDecimal(100, MAX_PRECISION).remainder(new BigDecimal("0.1", MAX_PRECISION), MAX_PRECISION));
	check("dmodulom", new BigDecimal("0.1", MAX_PRECISION).remainder(new BigDecimal(Long.valueOf(Integer.MAX_VALUE), MAX_PRECISION), MAX_PRECISION));
	check("mmodulod", new BigDecimal(Long.valueOf(Integer.MAX_VALUE), MAX_PRECISION).remainder(new BigDecimal("0.1", MAX_PRECISION), MAX_PRECISION));
	check("dmodulon", new BigDecimal("0.1", MAX_PRECISION).remainder(new BigDecimal(0.1D, MAX_PRECISION), MAX_PRECISION));
	check("nmodulod", new BigDecimal(0.1D, MAX_PRECISION).remainder(new BigDecimal("0.1", MAX_PRECISION), MAX_PRECISION));
}

// Prefix/postfix ++/--, unary minus and logical not, for every numeric CTL type;
// "...Orig" variables capture the operand before the operator was applied.
public void test_operators_unary() {
	doCompile("test_operators_unary");
	// postfix operators
	// int
	check("intPlusOrig", Integer.valueOf(10));
	check("intPlusPlus", Integer.valueOf(10));
	check("intPlus", Integer.valueOf(11));
	check("intMinusOrig", Integer.valueOf(10));
	check("intMinusMinus", Integer.valueOf(10));
	check("intMinus", Integer.valueOf(9));
	// long
	check("longPlusOrig", Long.valueOf(10));
	check("longPlusPlus", Long.valueOf(10));
	check("longPlus", Long.valueOf(11));
	check("longMinusOrig", Long.valueOf(10));
	check("longMinusMinus", Long.valueOf(10));
	check("longMinus", Long.valueOf(9));
	// double
	check("numberPlusOrig", Double.valueOf(10.1));
	check("numberPlusPlus", Double.valueOf(10.1));
	check("numberPlus", Double.valueOf(11.1));
	check("numberMinusOrig", Double.valueOf(10.1));
	check("numberMinusMinus", Double.valueOf(10.1));
	check("numberMinus", Double.valueOf(9.1));
	// decimal
	check("decimalPlusOrig", new BigDecimal("10.1"));
	check("decimalPlusPlus", new BigDecimal("10.1"));
	check("decimalPlus", new BigDecimal("11.1"));
	check("decimalMinusOrig", new BigDecimal("10.1"));
	check("decimalMinusMinus", new BigDecimal("10.1"));
	check("decimalMinus", new BigDecimal("9.1"));
	// prefix operators
	// integer
	check("plusIntOrig", Integer.valueOf(10));
	check("plusPlusInt", Integer.valueOf(11));
	check("plusInt", Integer.valueOf(11));
	check("minusIntOrig", Integer.valueOf(10));
	check("minusMinusInt", Integer.valueOf(9));
	check("minusInt", Integer.valueOf(9));
	check("unaryInt", Integer.valueOf(-10));
	// long
	check("plusLongOrig", Long.valueOf(10));
	check("plusPlusLong", Long.valueOf(11));
	check("plusLong", Long.valueOf(11));
	check("minusLongOrig", Long.valueOf(10));
	check("minusMinusLong", Long.valueOf(9));
	check("minusLong", Long.valueOf(9));
	check("unaryLong", Long.valueOf(-10));
	// double
	check("plusNumberOrig", Double.valueOf(10.1));
	check("plusPlusNumber", Double.valueOf(11.1));
	check("plusNumber", Double.valueOf(11.1));
	check("minusNumberOrig", Double.valueOf(10.1));
	check("minusMinusNumber", Double.valueOf(9.1));
	check("minusNumber", Double.valueOf(9.1));
	check("unaryNumber", Double.valueOf(-10.1));
	// decimal
	check("plusDecimalOrig", new BigDecimal("10.1"));
	check("plusPlusDecimal", new BigDecimal("11.1"));
	check("plusDecimal", new BigDecimal("11.1"));
	check("minusDecimalOrig", new BigDecimal("10.1"));
	check("minusMinusDecimal", new BigDecimal("9.1"));
	check("minusDecimal", new BigDecimal("9.1"));
	check("unaryDecimal", new BigDecimal("-10.1"));
	// record values
assertEquals(101, ((DataRecord) getVariable("plusPlusRecord")).getField("Value").getValue());
	assertEquals(101, ((DataRecord) getVariable("recordPlusPlus")).getField("Value").getValue());
	assertEquals(101, ((DataRecord) getVariable("modifiedPlusPlusRecord")).getField("Value").getValue());
	assertEquals(101, ((DataRecord) getVariable("modifiedRecordPlusPlus")).getField("Value").getValue());
	//record as parameter
	assertEquals(99, ((DataRecord) getVariable("minusMinusRecord")).getField("Value").getValue());
	assertEquals(99, ((DataRecord) getVariable("recordMinusMinus")).getField("Value").getValue());
	assertEquals(99, ((DataRecord) getVariable("modifiedMinusMinusRecord")).getField("Value").getValue());
	assertEquals(99, ((DataRecord) getVariable("modifiedRecordMinusMinus")).getField("Value").getValue());
	// logical not
	check("booleanValue", true);
	check("negation", false);
	check("doubleNegation", true);
}

// Negative test: ++/-- on record fields / input records must produce exactly these
// compiler errors.
public void test_operators_unary_record() {
	doCompileExpectErrors("test_operators_unary_record", Arrays.asList(
			"Illegal argument to ++/-- operator",
			"Illegal argument to ++/-- operator",
			"Illegal argument to ++/-- operator",
			"Illegal argument to ++/-- operator",
			"Input record cannot be assigned to",
			"Input record cannot be assigned to",
			"Input record cannot be assigned to",
			"Input record cannot be assigned to"
	));
}

// '==' operator across the CTL type matrix; the eqNN variables are computed in the
// CTL script test_operator_equal.
public void test_operator_equal() {
	doCompile("test_operator_equal");
	check("eq0", true);
	check("eq1", true);
	check("eq1a", true);
	check("eq1b", true);
	check("eq1c", false);
	check("eq2", true);
	check("eq3", true);
	check("eq4", true);
	check("eq5", true);
	check("eq6", false);
	check("eq7", true);
	check("eq8", false);
	check("eq9", true);
	check("eq10", false);
	check("eq11", true);
	check("eq12", false);
	check("eq13", true);
	check("eq14", false);
	check("eq15", false);
	check("eq16", true);
	check("eq17", false);
	check("eq18", false);
	check("eq19", false);
	// byte
	check("eq20", true);
	check("eq21", true);
	check("eq22", false);
	check("eq23", false);
	check("eq24", true);
	check("eq25", false);
	check("eq20c", true);
	check("eq21c", true);
	check("eq22c", false);
	check("eq23c", false);
	check("eq24c", true);
	check("eq25c", false);
	check("eq26", true);
	check("eq27", true);
}

// '!=' operator across numeric type combinations.
public void test_operator_non_equal(){
	doCompile("test_operator_non_equal");
	check("inei", false);
	check("inej", true);
	check("jnei", true);
	check("jnej", false);
	check("lnei", false);
	check("inel", false);
	check("lnej", true);
	check("jnel", true);
	check("lnel", false);
	check("dnei", false);
	check("ined", false);
	check("dnej", true);
	check("jned", true);
	check("dnel", false);
	check("lned", false);
	check("dned", false);
	check("dned_different_scale", false);
}

// 'in' operator: membership in empty lists, double lists and string lists.
public void test_operator_in() {
	doCompile("test_operator_in");
	check("a", Integer.valueOf(1));
	check("haystack", Collections.EMPTY_LIST);
	check("needle", Integer.valueOf(2));
	check("b1", true);
	check("b2", false);
	check("h2", Arrays.asList(2.1D, 2.0D, 2.2D));
	check("b3", true);
	check("h3", Arrays.asList("memento", "mori", "memento mori"));
	check("n3", "memento mori");
	check("b4", true);
}

public void test_operator_greater_less() {
	doCompile("test_operator_greater_less");
	check("eq1", true);
	check("eq2", true);
	check("eq3", true);
	check("eq4", false);
	check("eq5", true);
	check("eq6", false);
	check("eq7", true);
	check("eq8", true);
	check("eq9", true);
}

// Ternary '?:' operator, including nesting in either and both branches.
public void test_operator_ternary(){
	doCompile("test_operator_ternary");
	// simple use
	check("trueValue", true);
	check("falseValue", false);
	check("res1", Integer.valueOf(1));
	check("res2", Integer.valueOf(2));
	// nesting in positive branch
	check("res3", Integer.valueOf(1));
	check("res4", Integer.valueOf(2));
	check("res5", Integer.valueOf(3));
	// nesting in negative branch
	check("res6", Integer.valueOf(2));
	check("res7", Integer.valueOf(3));
	// nesting in both branches
	check("res8", Integer.valueOf(1));
	check("res9", Integer.valueOf(1));
	check("res10", Integer.valueOf(2));
	check("res11", Integer.valueOf(3));
	check("res12", Integer.valueOf(2));
	check("res13",
Integer.valueOf(4));
	check("res14", Integer.valueOf(3));
	check("res15", Integer.valueOf(4));
}

// '&&' and '||' operators.
public void test_operators_logical(){
	doCompile("test_operators_logical");
	//TODO: please double check this.
	check("res1", false);
	check("res2", false);
	check("res3", true);
	check("res4", true);
	check("res5", false);
	check("res6", false);
	check("res7", true);
	check("res8", false);
}

// Regex match operator.
public void test_regex(){
	doCompile("test_regex");
	check("eq0", false);
	check("eq1", true);
	check("eq2", false);
	check("eq3", true);
	check("eq4", false);
	check("eq5", true);
}

// 'if' statement in all syntactic shapes: single statement, block, else, else-if chains.
public void test_if() {
	doCompile("test_if");
	// if with single statement
	check("cond1", true);
	check("res1", true);
	// if with mutliple statements (block)
	check("cond2", true);
	check("res21", true);
	check("res22", true);
	// else with single statement
	check("cond3", false);
	check("res31", false);
	check("res32", true);
	// else with multiple statements (block)
	check("cond4", false);
	check("res41", false);
	check("res42", true);
	check("res43", true);
	// if with block, else with block
	check("cond5", false);
	check("res51", false);
	check("res52", false);
	check("res53", true);
	check("res54", true);
	// else-if with single statement
	check("cond61", false);
	check("cond62", true);
	check("res61", false);
	check("res62", true);
	// else-if with multiple statements
	check("cond71", false);
	check("cond72", true);
	check("res71", false);
	check("res72", true);
	check("res73", true);
	// if-elseif-else test
	check("cond81", false);
	check("cond82", false);
	check("res81", false);
	check("res82", false);
	check("res83", true);
	// if with single statement + inactive else
	check("cond9", true);
	check("res91", true);
	check("res92", false);
	// if with multiple statements + inactive else with block
	check("cond10", true);
	check("res101", true);
	check("res102", true);
	check("res103", false);
	check("res104", false);
	// if with condition
	check("i", 0);
	check("j", 1);
	check("res11", true);
}

// 'switch' statement: fall-through, default branch, shared case labels.
public void test_switch() {
	doCompile("test_switch");
	// simple switch
	check("cond1", 1);
	check("res11", false);
	check("res12", true);
	check("res13", false);
	// switch, no break
	check("cond2", 1);
	check("res21", false);
	check("res22", true);
	check("res23", true);
	// default branch
	check("cond3", 3);
	check("res31", false);
	check("res32", false);
	check("res33", true);
	// no default branch => no match
	check("cond4", 3);
	check("res41", false);
	check("res42", false);
	check("res43", false);
	// multiple statements in a single case-branch
	check("cond5", 1);
	check("res51", false);
	check("res52", true);
	check("res53", true);
	check("res54", false);
	// single statement shared by several case labels
	check("cond6", 1);
	check("res61", false);
	check("res62", true);
	check("res63", true);
	check("res64", false);
}

// 'switch' over integer selectors, including continue/return inside the switch body.
public void test_int_switch(){
	doCompile("test_int_switch");
	// simple switch
	check("cond1", 1);
	check("res11", true);
	check("res12", false);
	check("res13", false);
	// first case is not followed by a break
	check("cond2", 1);
	check("res21", true);
	check("res22", true);
	check("res23", false);
	// first and second case have multiple labels
	check("cond3", 12);
	check("res31", false);
	check("res32", true);
	check("res33", false);
	// first and second case have multiple labels and no break after first group
	check("cond4", 11);
	check("res41", true);
	check("res42", true);
	check("res43", false);
	// default case intermixed with other case labels in the second group
	check("cond5", 11);
	check("res51", true);
	check("res52", true);
	check("res53", true);
	// default case intermixed, with break
	check("cond6", 16);
	check("res61", false);
	check("res62", true);
	check("res63", false);
	// continue test
	check("res7", Arrays.asList(
			false, false, false, true, true, false, true, true, false, false,
			true, false, false, true, false, false, false, true));
	// return test
	check("res8", Arrays.asList("0123", "123", "23", "3", "4", "3"));
}

// Same scenarios as test_int_switch but with non-integer (string) selectors.
public void test_non_int_switch(){
	doCompile("test_non_int_switch");
	// simple switch
	check("cond1", "1");
	check("res11", true);
check("res12", false);
	check("res13", false);
	// first case is not followed by a break
	check("cond2", "1");
	check("res21", true);
	check("res22", true);
	check("res23", false);
	// first and second case have multiple labels
	check("cond3", "12");
	check("res31", false);
	check("res32", true);
	check("res33", false);
	// first and second case have multiple labels and no break after first group
	check("cond4", "11");
	check("res41", true);
	check("res42", true);
	check("res43", false);
	// default case intermixed with other case labels in the second group
	check("cond5", "11");
	check("res51", true);
	check("res52", true);
	check("res53", true);
	// default case intermixed, with break
	check("cond6", "16");
	check("res61", false);
	check("res62", true);
	check("res63", false);
	// continue test
	check("res7", Arrays.asList(
			false, false, false, true, true, false, true, true, false, false,
			true, false, false, true, false, false, false, true));
	// return test
	check("res8", Arrays.asList("0123", "123", "23", "3", "4", "3"));
}

// 'while' loop: plain iteration, continue and break.
public void test_while() {
	doCompile("test_while");
	// simple while
	check("res1", Arrays.asList(0, 1, 2));
	// continue
	check("res2", Arrays.asList(0, 2));
	// break
	check("res3", Arrays.asList(0));
}

// 'do-while' loop: plain iteration, continue and break.
public void test_do_while() {
	doCompile("test_do_while");
	// simple while
	check("res1", Arrays.asList(0, 1, 2));
	// continue
	check("res2", Arrays.asList(0, null, 2));
	// break
	check("res3", Arrays.asList(0));
}

// 'for' loop: continue/break plus every combination of omitted init/update/condition.
public void test_for() {
	doCompile("test_for");
	// simple loop
	check("res1", Arrays.asList(0,1,2));
	// continue
	check("res2", Arrays.asList(0,null,2));
	// break
	check("res3", Arrays.asList(0));
	// empty init
	check("res4", Arrays.asList(0,1,2));
	// empty update
	check("res5", Arrays.asList(0,1,2));
	// empty final condition
	check("res6", Arrays.asList(0,1,2));
	// all conditions empty
	check("res7", Arrays.asList(0,1,2));
}

public void test_for1() {
	//5125: CTL2: "for" cycle is EXTREMELY memory consuming
	doCompile("test_for1");
	checkEquals("counter", "COUNT");
}
@SuppressWarnings("unchecked") public void test_foreach() { doCompile("test_foreach"); check("intRes", Arrays.asList(VALUE_VALUE)); check("longRes", Arrays.asList(BORN_MILLISEC_VALUE)); check("doubleRes", Arrays.asList(AGE_VALUE)); check("decimalRes", Arrays.asList(CURRENCY_VALUE)); check("booleanRes", Arrays.asList(FLAG_VALUE)); check("stringRes", Arrays.asList(NAME_VALUE, CITY_VALUE)); check("dateRes", Arrays.asList(BORN_VALUE)); List<?> integerStringMapResTmp = (List<?>) getVariable("integerStringMapRes"); List<String> integerStringMapRes = new ArrayList<String>(integerStringMapResTmp.size()); for (Object o: integerStringMapResTmp) { integerStringMapRes.add(String.valueOf(o)); } List<Integer> stringIntegerMapRes = (List<Integer>) getVariable("stringIntegerMapRes"); List<DataRecord> stringRecordMapRes = (List<DataRecord>) getVariable("stringRecordMapRes"); Collections.sort(integerStringMapRes); Collections.sort(stringIntegerMapRes); assertEquals(Arrays.asList("0", "1", "2", "3", "4"), integerStringMapRes); assertEquals(Arrays.asList(0, 1, 2, 3, 4), stringIntegerMapRes); final int N = 5; assertEquals(N, stringRecordMapRes.size()); int equalRecords = 0; for (int i = 0; i < N; i++) { for (DataRecord r: stringRecordMapRes) { if (Integer.valueOf(i).equals(r.getField("Value").getValue()) && "A string".equals(String.valueOf(r.getField("Name").getValue()))) { equalRecords++; break; } } } assertEquals(N, equalRecords); } public void test_return(){ doCompile("test_return"); check("lhs", Integer.valueOf(1)); check("rhs", Integer.valueOf(2)); check("res", Integer.valueOf(3)); } public void test_return_incorrect() { doCompileExpectError("test_return_incorrect", "Can't convert from 'string' to 'integer'"); } public void test_return_void() { doCompile("test_return_void"); } public void test_overloading() { doCompile("test_overloading"); check("res1", Integer.valueOf(3)); check("res2", "Memento mori"); } public void test_overloading_incorrect() { 
doCompileExpectErrors("test_overloading_incorrect", Arrays.asList( "Duplicate function 'integer sum(integer, integer)'", "Duplicate function 'integer sum(integer, integer)'")); } //Test case for 4038 public void test_function_parameter_without_type() { doCompileExpectError("test_function_parameter_without_type", "Syntax error on token ')'"); } public void test_duplicate_import() { URL importLoc = getClass().getSuperclass().getResource("test_duplicate_import.ctl"); String expStr = "import '" + importLoc + "';\n"; expStr += "import '" + importLoc + "';\n"; doCompile(expStr, "test_duplicate_import"); } /*TODO: * public void test_invalid_import() { URL importLoc = getClass().getResource("test_duplicate_import.ctl"); String expStr = "import '/a/b/c/d/e/f/g/h/i/j/k/l/m';\n"; expStr += expStr; doCompileExpectError(expStr, "test_invalid_import", Arrays.asList("TODO: Unknown error")); //doCompileExpectError(expStr, "test_duplicate_import", Arrays.asList("TODO: Unknown error")); } */ public void test_built_in_functions(){ doCompile("test_built_in_functions"); check("notNullValue", Integer.valueOf(1)); checkNull("nullValue"); check("isNullRes1", false); check("isNullRes2", true); assertEquals("nvlRes1", getVariable("notNullValue"), getVariable("nvlRes1")); check("nvlRes2", Integer.valueOf(2)); assertEquals("nvl2Res1", getVariable("notNullValue"), getVariable("nvl2Res1")); check("nvl2Res2", Integer.valueOf(2)); check("iifRes1", Integer.valueOf(2)); check("iifRes2", Integer.valueOf(1)); } public void test_mapping(){ doCompile("test_mapping"); // simple mappings assertEquals("Name", NAME_VALUE, outputRecords[0].getField("Name").getValue().toString()); assertEquals("Age", AGE_VALUE, outputRecords[0].getField("Age").getValue()); assertEquals("City", CITY_VALUE, outputRecords[0].getField("City").getValue().toString()); assertEquals("Born", BORN_VALUE, outputRecords[0].getField("Born").getValue()); // * mapping assertTrue(recordEquals(inputRecords[1], outputRecords[1])); 
check("len", 2); } public void test_mapping_null_values() { doCompile("test_mapping_null_values"); assertTrue(recordEquals(inputRecords[2], outputRecords[0])); } public void test_copyByName() { doCompile("test_copyByName"); assertEquals("Field1", null, outputRecords[3].getField("Field1").getValue()); assertEquals("Age", AGE_VALUE, outputRecords[3].getField("Age").getValue()); assertEquals("City", CITY_VALUE, outputRecords[3].getField("City").getValue().toString()); } public void test_copyByName_assignment() { doCompile("test_copyByName_assignment"); assertEquals("Field1", null, outputRecords[3].getField("Field1").getValue()); assertEquals("Age", AGE_VALUE, outputRecords[3].getField("Age").getValue()); assertEquals("City", CITY_VALUE, outputRecords[3].getField("City").getValue().toString()); } public void test_copyByName_assignment1() { doCompile("test_copyByName_assignment1"); assertEquals("Field1", null, outputRecords[3].getField("Field1").getValue()); assertEquals("Age", null, outputRecords[3].getField("Age").getValue()); assertEquals("City", null, outputRecords[3].getField("City").getValue()); } public void test_sequence(){ doCompile("test_sequence"); check("intRes", Arrays.asList(0,1,2)); check("longRes", Arrays.asList(Long.valueOf(0),Long.valueOf(1),Long.valueOf(2))); check("stringRes", Arrays.asList("0","1","2")); check("intCurrent", Integer.valueOf(2)); check("longCurrent", Long.valueOf(2)); check("stringCurrent", "2"); } //TODO: If this test fails please double check whether the test is correct? 
public void test_lookup(){ doCompile("test_lookup"); check("alphaResult", Arrays.asList("Andorra la Vella","Andorra la Vella")); check("bravoResult", Arrays.asList("Bruxelles","Bruxelles")); check("charlieResult", Arrays.asList("Chamonix","Chodov","Chomutov","Chamonix","Chodov","Chomutov")); check("countResult", Arrays.asList(3,3)); check("charlieUpdatedCount", 5); check("charlieUpdatedResult", Arrays.asList("Chamonix", "Cheb", "Chodov", "Chomutov", "Chrudim")); check("putResult", true); } public void test_containerlib_append() { doCompile("test_containerlib_append"); check("appendElem", Integer.valueOf(10)); check("appendList", Arrays.asList(1, 2, 3, 4, 5, 10)); } @SuppressWarnings("unchecked") public void test_containerlib_clear() { doCompile("test_containerlib_clear"); assertTrue(((List<Integer>) getVariable("clearList")).isEmpty()); } public void test_containerlib_copy() { doCompile("test_containerlib_copy"); check("copyList", Arrays.asList(1, 2, 3, 4, 5)); check("returnedList", Arrays.asList(1, 2, 3, 4, 5)); Map<String, String> expectedMap = new HashMap<String, String>(); expectedMap.put("a", "a"); expectedMap.put("b", "b"); expectedMap.put("c", "c"); expectedMap.put("d", "d"); check("copyMap", expectedMap); check("returnedMap", expectedMap); } public void test_containerlib_insert() { doCompile("test_containerlib_insert"); check("insertElem", Integer.valueOf(7)); check("insertIndex", Integer.valueOf(3)); check("insertList", Arrays.asList(1, 2, 3, 7, 4, 5)); check("insertList1", Arrays.asList(7, 8, 11, 10, 11)); check("insertList2", Arrays.asList(7, 8, 10, 9, 11)); } public void test_containerlib_isEmpty() { doCompile("test_containerlib_isEmpty"); check("emptyMap", true); check("fullMap", false); check("emptyList", true); check("fullList", false); } public void test_containerlib_poll() { doCompile("test_containerlib_poll"); check("pollElem", Integer.valueOf(1)); check("pollList", Arrays.asList(2, 3, 4, 5)); } public void test_containerlib_pop() { 
doCompile("test_containerlib_pop"); check("popElem", Integer.valueOf(5)); check("popList", Arrays.asList(1, 2, 3, 4)); } @SuppressWarnings("unchecked") public void test_containerlib_push() { doCompile("test_containerlib_push"); check("pushElem", Integer.valueOf(6)); check("pushList", Arrays.asList(1, 2, 3, 4, 5, 6)); // there is hardly any way to get an instance of DataRecord // hence we just check if the list has correct size // and if its elements have correct metadata List<DataRecord> recordList = (List<DataRecord>) getVariable("recordList"); List<DataRecordMetadata> mdList = Arrays.asList( graph.getDataRecordMetadata(OUTPUT_1), graph.getDataRecordMetadata(INPUT_2), graph.getDataRecordMetadata(INPUT_1) ); assertEquals(mdList.size(), recordList.size()); for (int i = 0; i < mdList.size(); i++) { assertEquals(mdList.get(i), recordList.get(i).getMetadata()); } } public void test_containerlib_remove() { doCompile("test_containerlib_remove"); check("removeElem", Integer.valueOf(3)); check("removeIndex", Integer.valueOf(2)); check("removeList", Arrays.asList(1, 2, 4, 5)); } public void test_containerlib_reverse() { doCompile("test_containerlib_reverse"); check("reverseList", Arrays.asList(5, 4, 3, 2, 1)); } public void test_containerlib_sort() { doCompile("test_containerlib_sort"); check("sortList", Arrays.asList(1, 1, 2, 3, 5)); } public void test_containerlib_containsAll() { doCompile("test_containerlib_containsAll"); check("results", Arrays.asList(true, false, true, false, true, true, true, false, true, true, false)); } public void test_containerlib_containsKey() { doCompile("test_containerlib_containsKey"); check("results", Arrays.asList(false, true, false, true, false, true)); } public void test_containerlib_containsValue() { doCompile("test_containerlib_containsValue"); check("results", Arrays.asList(true, false, false, true, false, false, true, false)); } public void test_containerlib_getKeys() { doCompile("test_containerlib_getKeys"); Map<?, ?> stringIntegerMap 
= (Map<?, ?>) inputRecords[3].getField("integerMapField").getValue(); Map<?, ?> integerStringMap = (Map<?, ?>) getVariable("integerStringMap"); List<?> stringList = (List<?>) getVariable("stringList"); List<?> integerList = (List<?>) getVariable("integerList"); assertEquals(stringIntegerMap.keySet().size(), stringList.size()); assertEquals(integerStringMap.keySet().size(), integerList.size()); assertEquals(stringIntegerMap.keySet(), new HashSet<Object>(stringList)); assertEquals(integerStringMap.keySet(), new HashSet<Object>(integerList)); } public void test_stringlib_cache() { doCompile("test_stringlib_cache"); check("rep1", "The cat says meow. All cats say meow."); check("rep2", "The cat says meow. All cats say meow."); check("rep3", "The cat says meow. All cats say meow."); check("find1", Arrays.asList("to", "to", "to", "tro", "to")); check("find2", Arrays.asList("to", "to", "to", "tro", "to")); check("find3", Arrays.asList("to", "to", "to", "tro", "to")); check("split1", Arrays.asList("one", "two", "three", "four", "five")); check("split2", Arrays.asList("one", "two", "three", "four", "five")); check("split3", Arrays.asList("one", "two", "three", "four", "five")); check("chop01", "ting soming choping function"); check("chop02", "ting soming choping function"); check("chop03", "ting soming choping function"); check("chop11", "testing end of lines cutting"); check("chop12", "testing end of lines cutting"); } public void test_stringlib_charAt() { doCompile("test_stringlib_charAt"); String input = "The QUICk !!$ broWn fox juMPS over the lazy DOG "; String[] expected = new String[input.length()]; for (int i = 0; i < expected.length; i++) { expected[i] = String.valueOf(input.charAt(i)); } check("chars", Arrays.asList(expected)); } public void test_stringlib_charAt_error(){ //test: attempt to access char at position, which is out of bounds -> upper bound try { doCompile("string test;function integer transform(){test = charAt('milk', 7);return 0;}", 
"test_stringlib_charAt_error"); fail(); } catch (Exception e) { // do nothing } //test: attempt to access char at position, which is out of bounds -> lower bound try { doCompile("string test;function integer transform(){test = charAt('milk', -1);return 0;}", "test_stringlib_charAt_error"); fail(); } catch (Exception e) { // do nothing } //test: argument for position is null try { doCompile("string test; integer i = null; function integer transform(){test = charAt('milk', i);return 0;}", "test_stringlib_charAt_error"); fail(); } catch (Exception e) { // do nothing } //test: input is null try { doCompile("string test;function integer transform(){test = charAt(null, 1);return 0;}", "test_stringlib_charAt_error"); fail(); } catch (Exception e) { // do nothing } //test: input is empty string try { doCompile("string test;function integer transform(){test = charAt('', 1);return 0;}", "test_stringlib_charAt_error"); fail(); } catch (Exception e) { // do nothing } } public void test_stringlib_concat() { doCompile("test_stringlib_concat"); final SimpleDateFormat format = new SimpleDateFormat(); format.applyPattern("yyyy MMM dd"); check("concat", ""); check("concat1", "ello hi ELLO 2,today is " + format.format(new Date())); check("concat2", ""); check("concat3", "clover"); check("test_null1", "null"); check("test_null2", "null"); check("test_null3","skynullisnullblue"); } public void test_stringlib_countChar() { doCompile("test_stringlib_countChar"); check("charCount", 3); check("count2", 0); } public void test_stringlib_countChar_emptychar() { // test: attempt to count empty chars in string. try { doCompile("integer charCount;function integer transform() {charCount = countChar('aaa','');return 0;}", "test_stringlib_countChar_emptychar"); fail(); } catch (Exception e) { // do nothing } // test: attempt to count empty chars in empty string. 
try { doCompile("integer charCount;function integer transform() {charCount = countChar('','');return 0;}", "test_stringlib_countChar_emptychar"); fail(); } catch (Exception e) { // do nothing } //test: null input - test 1 try { doCompile("integer charCount;function integer transform() {charCount = countChar(null,'a');return 0;}", "test_stringlib_countChar_emptychar"); fail(); } catch (Exception e) { // do nothing } //test: null input - test 2 try { doCompile("integer charCount;function integer transform() {charCount = countChar(null,'');return 0;}", "test_stringlib_countChar_emptychar"); fail(); } catch (Exception e) { // do nothing } //test: null input - test 3 try { doCompile("integer charCount;function integer transform() {charCount = countChar(null, null);return 0;}", "test_stringlib_countChar_emptychar"); fail(); } catch (Exception e) { // do nothing } } public void test_stringlib_cut() { doCompile("test_stringlib_cut"); check("cutInput", Arrays.asList("a", "1edf", "h3ijk")); } public void test_string_cut_expect_error() { // test: Attempt to cut substring from position after the end of original string. E.g. string is 6 char long and // user attempt to cut out after position 8. try { doCompile("string input;string[] cutInput;function integer transform() {input = 'abc1edf2geh3ijk10lmn999opq';cutInput = cut(input,[28,3]);return 0;}", "test_stringlib_cut_expect_error"); fail(); } catch (Exception e) { // do nothing } // test: Attempt to cut substring longer then possible. E.g. 
string is 6 characters long and user cuts from // position // 4 substring 4 characters long try { doCompile("string input;string[] cutInput;function integer transform() {input = 'abc1edf2geh3ijk10lmn999opq';cutInput = cut(input,[20,8]);return 0;}", "test_stringlib_cut_expect_error"); fail(); } catch (Exception e) { // do nothing } // test: Attempt to cut a substring with negative length try { doCompile("string input;string[] cutInput;function integer transform() {input = 'abc1edf2geh3ijk10lmn999opq';cutInput = cut(input,[20,-3]);return 0;}", "test_stringlib_cut_expect_error"); fail(); } catch (Exception e) { // do nothing } // test: Attempt to cut substring from negative position. E.g cut([-3,3]). try { doCompile("string input;string[] cutInput;function integer transform() {input = 'abc1edf2geh3ijk10lmn999opq';cutInput = cut(input,[-3,3]);return 0;}", "test_stringlib_cut_expect_error"); fail(); } catch (Exception e) { // do nothing } //test: input is empty string try { doCompile("string input;string[] cutInput;function integer transform() {input = '';cutInput = cut(input,[0,3]);return 0;}", "test_stringlib_cut_expect_error"); fail(); } catch (Exception e) { // do nothing } //test: second arg is null try { doCompile("string input;string[] cutInput;function integer transform() {input = 'aaaa';cutInput = cut(input,null);return 0;}", "test_stringlib_cut_expect_error"); fail(); } catch (Exception e) { // do nothing } //test: input is null try { doCompile("string input;string[] cutInput;function integer transform() {input = null;cutInput = cut(input,[5,11]);return 0;}", "test_stringlib_cut_expect_error"); fail(); } catch (Exception e) { // do nothing } } public void test_stringlib_editDistance() { doCompile("test_stringlib_editDistance"); check("dist", 1); check("dist1", 1); check("dist2", 0); check("dist5", 1); check("dist3", 1); check("dist4", 0); check("dist6", 4); check("dist7", 5); check("dist8", 0); check("dist9", 0); } public void 
test_stringlib_editDistance_expect_error(){ //test: input - empty string - first arg try { doCompile("integer test;function integer transform() {test = editDistance('','mark');return 0;}","test_stringlib_editDistance_expect_error"); } catch ( Exception e) { // do nothing } //test: input - null - first arg try { doCompile("integer test;function integer transform() {test = editDistance(null,'mark');return 0;}","test_stringlib_editDistance_expect_error"); } catch ( Exception e) { // do nothing } //test: input- empty string - second arg try { doCompile("integer test;function integer transform() {test = editDistance('mark','');return 0;}","test_stringlib_editDistance_expect_error"); } catch ( Exception e) { // do nothing } //test: input - null - second argument try { doCompile("integer test;function integer transform() {test = editDistance('mark',null);return 0;}","test_stringlib_editDistance_expect_error"); } catch ( Exception e) { // do nothing } //test: input - both empty try { doCompile("integer test;function integer transform() {test = editDistance('','');return 0;}","test_stringlib_editDistance_expect_error"); } catch ( Exception e) { // do nothing } //test: input - both null try { doCompile("integer test;function integer transform() {test = editDistance(null,null);return 0;}","test_stringlib_editDistance_expect_error"); } catch ( Exception e) { // do nothing } } public void test_stringlib_find() { doCompile("test_stringlib_find"); check("findList", Arrays.asList("The quick br", "wn f", "x jumps ", "ver the lazy d", "g")); check("findList2", Arrays.asList("mark.twain")); check("findList3", Arrays.asList()); check("findList4", Arrays.asList("", "", "", "", "")); check("findList5", Arrays.asList("twain")); check("findList6", Arrays.asList("")); } public void test_stringlib_find_expect_error() { //test: regexp group number higher then count of regexp groups try { doCompile("string[] findList;function integer transform() {findList = 
find('mark.twain@javlin.eu','(^[a-z]*).([a-z]*)',5); return 0;}", "test_stringlib_find_expect_error"); } catch (Exception e) { // do nothing } //test: negative regexp group number try { doCompile("string[] findList;function integer transform() {findList = find('mark.twain@javlin.eu','(^[a-z]*).([a-z]*)',-1); return 0;}", "test_stringlib_find_expect_error"); } catch (Exception e) { // do nothing } //test: arg1 null input try { doCompile("string[] findList;function integer transform() {findList = find(null,'(^[a-z]*).([a-z]*)'); return 0;}", "test_stringlib_find_expect_error"); } catch (Exception e) { // do nothing } //test: arg2 null input - test1 try { doCompile("string[] findList;function integer transform() {findList = find('mark.twain@javlin.eu',null); return 0;}", "test_stringlib_find_expect_error"); } catch (Exception e) { // do nothing } //test: arg2 null input - test2 try { doCompile("string[] findList;function integer transform() {findList = find('',null); return 0;}", "test_stringlib_find_expect_error"); } catch (Exception e) { // do nothing } //test: arg1 and arg2 null input try { doCompile("string[] findList;function integer transform() {findList = find(null,null); return 0;}", "test_stringlib_find_expect_error"); } catch (Exception e) { // do nothing } } public void test_stringlib_join() { doCompile("test_stringlib_join"); //check("joinedString", "Bagr,3,3.5641,-87L,CTL2"); check("joinedString1", "80=5455.987\"-5=5455.987\"3=0.1"); check("joinedString2", "5.054.6567.0231.0"); //check("joinedString3", "554656723180=5455.987-5=5455.9873=0.1CTL242"); check("test_empty1", "abc"); check("test_empty2", ""); check("test_empty3"," "); check("test_empty4","anullb"); check("test_empty5","80=5455.987-5=5455.9873=0.1"); check("test_empty6","80=5455.987 -5=5455.987 3=0.1"); check("test_null1","abc"); check("test_null2",""); check("test_null3","anullb"); check("test_null4","80=5455.987-5=5455.9873=0.1"); //CLO-1210 // check("test_empty7","a=xb=nullc=z"); // 
check("test_empty8","a=x b=null c=z"); // check("test_empty9","null=xeco=storm"); // check("test_empty10","null=x eco=storm"); // check("test_null5","a=xb=nullc=z"); // check("test_null6","null=xeco=storm"); } public void test_stringlib_join_expect_error(){ try { doCompile("function integer transform(){string s = join(';',null);return 0;}","test_stringlib_join_expect_error"); fail(); } catch (Exception e) { // do nothing } try { doCompile("function integer transform(){string[] tmp = null; string s = join(';',tmp);return 0;}","test_stringlib_join_expect_error"); fail(); } catch (Exception e) { // do nothing } try { doCompile("function integer transform(){map[string,string] a = null; string s = join(';',a);return 0;}","test_stringlib_join_expect_error"); fail(); } catch (Exception e) { // do nothing } } public void test_stringlib_left() { //CLO - 1193 // doCompile("test_stringlib_left"); // check("test1", "aa"); // check("test2", "aaa"); // check("test3", ""); // check("test4", null); // check("test5", "abc"); // check("test6", "ab "); // check("test7", " "); // check("test8", " "); // check("test9", "abc"); // check("test10", "abc"); // check("test11", ""); // check("test12", null); } public void test_stringlib_length() { doCompile("test_stringlib_length"); check("lenght1", new BigDecimal(50)); check("lenghtByte", 18); check("stringLength", 8); check("listLength", 8); check("mapLength", 3); check("recordLength", 9); check("length_empty", 0); check("length_null1", 0); check("length_null2", 0); check("length_null3", 0); check("length_null4", 0); check("length_null5", 0); } public void test_stringlib_lowerCase() { doCompile("test_stringlib_lowerCase"); check("lower", "the quick !!$ brown fox jumps over the lazy dog bagr "); check("lower_empty", ""); check("lower_null", null); } public void test_stringlib_matches() { doCompile("test_stringlib_matches"); check("matches1", true); check("matches2", true); check("matches3", false); check("matches4", true); check("matches5", 
false); check("matches6", false); check("matches7", false); check("matches8", false); check("matches9", true); check("matches10", true); } public void test_stringlib_matches_expect_error(){ //test: regexp param null - test 1 try { doCompile("boolean test; function integer transform(){test = matches('aaa', null); return 0;}","test_stringlib_matches_expect_error"); fail(); } catch (Exception e) { // do nothing } //test: regexp param null - test 2 try { doCompile("boolean test; function integer transform(){test = matches('', null); return 0;}","test_stringlib_matches_expect_error"); fail(); } catch (Exception e) { // do nothing } //test: regexp param null - test 3 try { doCompile("boolean test; function integer transform(){test = matches(null, null); return 0;}","test_stringlib_matches_expect_error"); fail(); } catch (Exception e) { // do nothing } } public void test_stringlib_matchGroups() { doCompile("test_stringlib_matchGroups"); check("result1", null); check("result2", Arrays.asList( //"(([^:]*)([:])([\\(]))(.*)(\\))((( "zip:(zip:(/path/name?.zip)#innerfolder/file.zip)#innermostfolder?/filename*.txt", "zip:(", "zip", ":", "(", "zip:(/path/name?.zip)#innerfolder/file.zip", ")", "#innermostfolder?/filename*.txt", "#innermostfolder?/filename*.txt", " "innermostfolder?/filename*.txt", null ) ); check("result3", null); check("test_empty1", null); check("test_empty2", Arrays.asList("")); check("test_null1", null); check("test_null2", null); } public void test_stringlib_matchGroups_expect_error(){ //test: regexp is null - test 1 try { doCompile("string[] test; function integer transform(){test = matchGroups('eat all the cookies',null); return 0;}","test_stringlib_matchGroups_expect_error"); } catch (Exception e) { // do nothing } //test: regexp is null - test 2 try { doCompile("string[] test; function integer transform(){test = matchGroups('',null); return 0;}","test_stringlib_matchGroups_expect_error"); } catch (Exception e) { // do nothing } //test: regexp is null - 
test 3 try { doCompile("string[] test; function integer transform(){test = matchGroups(null,null); return 0;}","test_stringlib_matchGroups_expect_error"); } catch (Exception e) { // do nothing } } public void test_stringlib_matchGroups_unmodifiable() { try { doCompile("test_stringlib_matchGroups_unmodifiable"); fail(); } catch (RuntimeException re) { }; } public void test_stringlib_metaphone() { doCompile("test_stringlib_metaphone"); check("metaphone1", "XRS"); check("metaphone2", "KWNTLN"); check("metaphone3", "KWNT"); check("metaphone4", ""); check("metaphone5", ""); check("test_empty1", ""); check("test_empty2", ""); check("test_null1", null); check("test_null2", null); } public void test_stringlib_nysiis() { doCompile("test_stringlib_nysiis"); check("nysiis1", "CAP"); check("nysiis2", "CAP"); check("nysiis3", "1234"); check("nysiis4", "C2 PRADACTAN"); check("nysiis_empty", ""); check("nysiis_null", null); } public void test_stringlib_replace() { doCompile("test_stringlib_replace"); final SimpleDateFormat format = new SimpleDateFormat(); format.applyPattern("yyyy MMM dd"); check("rep", format.format(new Date()).replaceAll("[lL]", "t")); check("rep1", "The cat says meow. 
All cats say meow."); check("rep2", "intruders must die"); check("test_empty1", "a"); check("test_empty2", ""); check("test_null", null); check("test_null2",""); check("test_null3","bbb"); check("test_null4",null); } public void test_stringlib_replace_expect_error(){ //test: regexp null - test1 try { doCompile("string test; function integer transform(){test = replace('a b',null,'b'); return 0;}","test_stringlib_replace_expect_error"); fail(); } catch (Exception e) { // do nothing } //test: regexp null - test2 try { doCompile("string test; function integer transform(){test = replace('',null,'b'); return 0;}","test_stringlib_replace_expect_error"); fail(); } catch (Exception e) { // do nothing } //test: regexp null - test3 try { doCompile("string test; function integer transform(){test = replace(null,null,'b'); return 0;}","test_stringlib_replace_expect_error"); fail(); } catch (Exception e) { // do nothing } //test: arg3 null - test1 try { doCompile("string test; function integer transform(){test = replace('a b','a+',null); return 0;}","test_stringlib_replace_expect_error"); fail(); } catch (Exception e) { // do nothing } // //test: arg3 null - test2 // try { // doCompile("string test; function integer transform(){test = replace('','a+',null); return 0;}","test_stringlib_replace_expect_error"); // fail(); // } catch (Exception e) { // // do nothing // //test: arg3 null - test3 // try { // doCompile("string test; function integer transform(){test = replace(null,'a+',null); return 0;}","test_stringlib_replace_expect_error"); // fail(); // } catch (Exception e) { // // do nothing //test: regexp and arg3 null - test1 try { doCompile("string test; function integer transform(){test = replace('a b',null,null); return 0;}","test_stringlib_replace_expect_error"); fail(); } catch (Exception e) { // do nothing } //test: regexp and arg3 null - test1 try { doCompile("string test; function integer transform(){test = replace(null,null,null); return 
0;}","test_stringlib_replace_expect_error"); fail(); } catch (Exception e) { // do nothing } } public void test_stringlib_right() { doCompile("test_stringlib_right"); check("righ", "y dog"); check("rightNotPadded", "y dog"); check("rightPadded", "y dog"); check("padded", " y dog"); check("notPadded", "y dog"); check("short", "Dog"); check("shortNotPadded", "Dog"); check("shortPadded", " Dog"); check("simple", "milk"); check("test_null1", null); check("test_null2", null); check("test_null3", " "); check("test_empty1", ""); check("test_empty2", ""); check("test_empty3"," "); } public void test_stringlib_soundex() { doCompile("test_stringlib_soundex"); check("soundex1", "W630"); check("soundex2", "W643"); check("test_null", null); check("test_empty", ""); } public void test_stringlib_split() { doCompile("test_stringlib_split"); check("split1", Arrays.asList("The quick br", "wn f", "", " jumps " , "ver the lazy d", "g")); check("test_empty", Arrays.asList("")); check("test_empty2", Arrays.asList("","a","a")); List<String> tmp = new ArrayList<String>(); tmp.add(null); check("test_null", tmp); } public void test_stringlib_split_expect_error(){ //test: regexp null - test1 try { doCompile("function integer transform(){string[] s = split('aaa',null); return 0;}","test_stringlib_split_expect_error"); fail(); } catch (Exception e) { // do nothing } //test: regexp null - test2 try { doCompile("function integer transform(){string[] s = split('',null); return 0;}","test_stringlib_split_expect_error"); fail(); } catch (Exception e) { // do nothing } //test: regexp null - test3 try { doCompile("function integer transform(){string[] s = split(null,null); return 0;}","test_stringlib_split_expect_error"); fail(); } catch (Exception e) { // do nothing } } public void test_stringlib_substring() { doCompile("test_stringlib_substring"); check("subs", "UICk "); check("test1", ""); check("test_empty", ""); } public void test_stringlib_substring_expect_error(){ try { doCompile("function 
integer transform(){string test = substring('arabela',4,19);return 0;}","test_stringlib_substring_expect_error"); fail(); } catch (Exception e) { // do nothing } try { doCompile("function integer transform(){string test = substring('arabela',15,3);return 0;}","test_stringlib_substring_expect_error"); fail(); } catch (Exception e) { // do nothing } try { doCompile("function integer transform(){string test = substring('arabela',2,-3);return 0;}","test_stringlib_substring_expect_error"); fail(); } catch (Exception e) { // do nothing } try { doCompile("function integer transform(){string test = substring('arabela',-5,7);return 0;}","test_stringlib_substring_expect_error"); fail(); } catch (Exception e) { // do nothing } try { doCompile("function integer transform(){string test = substring('',0,7);return 0;}","test_stringlib_substring_expect_error"); fail(); } catch (Exception e) { // do nothing } try { doCompile("function integer transform(){string test = substring('',7,7);return 0;}","test_stringlib_substring_expect_error"); fail(); } catch (Exception e) { // do nothing } try { doCompile("function integer transform(){string test = substring(null,0,0);return 0;}","test_stringlib_substring_expect_error"); fail(); } catch (Exception e) { // do nothing } try { doCompile("function integer transform(){string test = substring(null,0,4);return 0;}","test_stringlib_substring_expect_error"); fail(); } catch (Exception e) { // do nothing } try { doCompile("function integer transform(){string test = substring(null,1,4);return 0;}","test_stringlib_substring_expect_error"); fail(); } catch (Exception e) { // do nothing } } public void test_stringlib_trim() { doCompile("test_stringlib_trim"); check("trim1", "im The QUICk !!$ broWn fox juMPS over the lazy DOG"); check("trim_empty", ""); check("trim_null", null); } public void test_stringlib_upperCase() { doCompile("test_stringlib_upperCase"); check("upper", "THE QUICK !!$ BROWN FOX JUMPS OVER THE LAZY DOG BAGR "); check("test_empty", 
""); check("test_null", null); } public void test_stringlib_isFormat() { doCompile("test_stringlib_isFormat"); check("test", "test"); check("isBlank", Boolean.FALSE); check("blank", ""); checkNull("nullValue"); check("isBlank1", true); check("isBlank2", true); check("isAscii1", true); check("isAscii2", false); check("isAscii3", true); check("isAscii4", true); check("isNumber", false); check("isNumber1", false); check("isNumber2", true); check("isNumber3", true); check("isNumber4", false); check("isNumber5", true); check("isNumber6", true); check("isNumber7", false); check("isNumber8", false); check("isInteger", false); check("isInteger1", false); check("isInteger2", false); check("isInteger3", true); check("isInteger4", false); check("isInteger5", false); check("isLong", true); check("isLong1", false); check("isLong2", false); check("isLong3", false); check("isLong4", false); check("isDate", true); check("isDate1", false); // "kk" allows hour to be 1-24 (as opposed to HH allowing hour to be 0-23) check("isDate2", true); check("isDate3", true); check("isDate4", false); check("isDate5", true); check("isDate6", true); check("isDate7", false); check("isDate9", false); check("isDate10", false); check("isDate11", false); check("isDate12", true); check("isDate13", false); check("isDate14", false); // empty string: invalid check("isDate15", false); check("isDate16", false); check("isDate17", true); check("isDate18", true); check("isDate19", false); check("isDate20", false); check("isDate21", false); /* CLO-1190 check("isDate22", false); check("isDate23", false); check("isDate24", true); check("isDate25", false); */ } public void test_stringlib_empty_strings() { String[] expressions = new String[] { "isInteger(?)", "isNumber(?)", "isLong(?)", "isAscii(?)", "isBlank(?)", "isDate(?, \"yyyy\")", "isUrl(?)", "string x = ?; length(x)", "lowerCase(?)", "matches(?, \"\")", "NYSIIS(?)", "removeBlankSpace(?)", "removeDiacritic(?)", "removeNonAscii(?)", "removeNonPrintable(?)", 
"replace(?, \"a\", \"a\")", "translate(?, \"ab\", \"cd\")", "trim(?)", "upperCase(?)", "chop(?)", "concat(?)", "getAlphanumericChars(?)", }; StringBuilder sb = new StringBuilder(); for (String expr : expressions) { String emptyString = expr.replace("?", "\"\""); boolean crashesEmpty = test_expression_crashes(emptyString); assertFalse("Function " + emptyString + " crashed", crashesEmpty); String nullString = expr.replace("?", "null"); boolean crashesNull = test_expression_crashes(nullString); sb.append(String.format("|%20s|%5s|%5s|%n", expr, crashesEmpty ? "CRASH" : "ok", crashesNull ? "CRASH" : "ok")); } System.out.println(sb.toString()); } private boolean test_expression_crashes(String expr) { String expStr = "function integer transform() { " + expr + "; return 0; }"; try { doCompile(expStr, "test_stringlib_empty_null_strings"); return false; } catch (RuntimeException e) { return true; } } public void test_stringlib_removeBlankSpace() { String expStr = "string r1;\n" + "string str_empty;\n" + "string str_null;\n" + "function integer transform() {\n" + "r1=removeBlankSpace(\"" + StringUtils.specCharToString(" a b\nc\rd e \u000Cf\r\n") + "\");\n" + "printErr(r1);\n" + "str_empty = removeBlankSpace('');\n" + "str_null = removeBlankSpace(null);\n" + "return 0;\n" + "}\n"; doCompile(expStr, "test_removeBlankSpace"); check("r1", "abcdef"); check("str_empty", ""); check("str_null", null); } public void test_stringlib_removeNonPrintable() { doCompile("test_stringlib_removeNonPrintable"); check("nonPrintableRemoved", "AHOJ"); check("test_empty", ""); check("test_null", null); } public void test_stringlib_getAlphanumericChars() { String expStr = "string an1;\n" + "string an2;\n" + "string an3;\n" + "string an4;\n" + "string an5;\n" + "string an6;\n" + "string an7;\n" + "string an8;\n" + "string an9;\n" + "string an10;\n" + "string an11;\n" + "string an12;\n" + "string an13;\n" + "string an14;\n" + "string an15;\n" + "function integer transform() {\n" + 
"an1=getAlphanumericChars(\"" + StringUtils.specCharToString(" a 1b\nc\rd \b e \u000C2f\r\n") + "\");\n" + "an2=getAlphanumericChars(\"" + StringUtils.specCharToString(" a 1b\nc\rd \b e \u000C2f\r\n") + "\",true,true);\n" + "an3=getAlphanumericChars(\"" + StringUtils.specCharToString(" a 1b\nc\rd \b e \u000C2f\r\n") + "\",true,false);\n" + "an4=getAlphanumericChars(\"" + StringUtils.specCharToString(" a 1b\nc\rd \b e \u000C2f\r\n") + "\",false,true);\n" + "an5=getAlphanumericChars(\"\");\n" + "an6=getAlphanumericChars(\"\",true,true);\n"+ "an7=getAlphanumericChars(\"\",true,false);\n"+ "an8=getAlphanumericChars(\"\",false,true);\n"+ "an9=getAlphanumericChars(null);\n" + "an10=getAlphanumericChars(null,false,false);\n" + "an11=getAlphanumericChars(null,true,false);\n" + "an12=getAlphanumericChars(null,false,true);\n" + "an13=getAlphanumericChars(' 0 ľeškó11');\n" + "an14=getAlphanumericChars(' 0 ľeškó11', false, false);\n" + //CLO-1174 "string tmp = \""+StringUtils.specCharToString(" a 1b\nc\rd \b e \u000C2f\r\n") + "\";\n"+ "printErr('BEFORE DO COMPILE: '+tmp); \n"+ "an15=getAlphanumericChars(\"" + StringUtils.specCharToString(" a 1b\nc\rd \b e \u000C2f\r\n") + "\",false,false);\n" + "printErr('AFTER GET_ALPHA_NUMERIC_CHARS: '+ an15);\n" + "return 0;\n" + "}\n"; doCompile(expStr, "test_getAlphanumericChars"); check("an1", "a1bcde2f"); check("an2", "a1bcde2f"); check("an3", "abcdef"); check("an4", "12"); check("an5", ""); check("an6", ""); check("an7", ""); check("an8", ""); check("an9", null); check("an10", null); check("an11", null); check("an12", null); check("an13", "0ľeškó11"); check("an14"," 0 ľeškó11"); //CLO-1174 String tmp = StringUtils.specCharToString(" a 1b\nc\rd \b e \u000C2f\r\n"); System.out.println("FROM JAVA - AFTER DO COMPILE: "+ tmp); //check("an15", tmp); } public void test_stringlib_indexOf(){ doCompile("test_stringlib_indexOf"); check("index",2); check("index1",9); check("index2",0); check("index3",-1); check("index4",6); check("index5",-1); 
check("index6",0); check("index7",4); check("index8",4); check("index9", -1); check("index10", 2); check("index_empty1", -1); check("index_empty2", 0); check("index_empty3", 0); check("index_empty4", -1); } public void test_stringlib_indexOf_expect_error(){ //test: second arg is null - test1 try { doCompile("integer index;function integer transform() {index = indexOf('hello world',null); return 0;}","test_stringlib_indexOf_expect_error"); fail(); } catch (Exception e) { // do nothing } //test: second arg is null - test2 try { doCompile("integer index;function integer transform() {index = indexOf('',null); return 0;}","test_stringlib_indexOf_expect_error"); fail(); } catch (Exception e) { // do nothing } //test: first arg is null - test1 try { doCompile("integer index;function integer transform() {index = indexOf(null,'a'); return 0;}","test_stringlib_indexOf_expect_error"); fail(); } catch (Exception e) { // do nothing } //test: first arg is null - test2 try { doCompile("integer index;function integer transform() {index = indexOf(null,''); return 0;}","test_stringlib_indexOf_expect_error"); fail(); } catch (Exception e) { // do nothing } //test: both args are null try { doCompile("integer index;function integer transform() {index = indexOf(null,null); return 0;}","test_stringlib_indexOf_expect_error"); fail(); } catch (Exception e) { // do nothing } } public void test_stringlib_removeDiacritic(){ doCompile("test_stringlib_removeDiacritic"); check("test","tescik"); check("test1","zabicka"); check("test_empty", ""); check("test_null", null); } public void test_stringlib_translate(){ doCompile("test_stringlib_translate"); check("trans","hippi"); check("trans1","hipp"); check("trans2","hippi"); check("trans3",""); check("trans4","y lanuaX nXXd thX lXttXr X"); check("trans5", "hello"); check("test_empty1", ""); check("test_empty2", ""); check("test_null", null); } public void test_stringlib_translate_expect_error(){ try { doCompile("function integer transform(){string 
test = translate('bla bla',null,'o');return 0;}","test_stringlib_expect_error"); fail(); } catch (Exception e) { // do nothing } try { doCompile("function integer transform(){string test = translate('bla bla','a',null);return 0;}","test_stringlib_expect_error"); fail(); } catch (Exception e) { // do nothing } try { doCompile("function integer transform(){string test = translate('bla bla',null,null);return 0;}","test_stringlib_expect_error"); fail(); } catch (Exception e) { // do nothing } try { doCompile("function integer transform(){string test = translate(null,'a',null);return 0;}","test_stringlib_expect_error"); fail(); } catch (Exception e) { // do nothing } try { doCompile("function integer transform(){string test = translate(null,null,'a');return 0;}","test_stringlib_expect_error"); fail(); } catch (Exception e) { // do nothing } try { doCompile("function integer transform(){string test = translate(null,null,null);return 0;}","test_stringlib_expect_error"); fail(); } catch (Exception e) { // do nothing } } public void test_stringlib_removeNonAscii(){ doCompile("test_stringlib_removeNonAscii"); check("test1", "Sun is shining"); check("test2", ""); check("test_empty", ""); check("test_null", null); } public void test_stringlib_chop() { doCompile("test_stringlib_chop"); check("s1", "hello"); check("s6", "hello"); check("s5", "hello"); check("s2", "hello"); check("s7", "helloworld"); check("s3", "hello "); check("s4", "hello"); check("s8", "hello"); check("s9", "world"); check("s10", "hello"); check("s11", "world"); check("s12", "mark.twain"); check("s13", "two words"); check("s14", ""); check("s15", ""); check("s16", ""); check("s17", ""); check("s18", ""); check("s19", "word"); check("s20", ""); check("s21", ""); check("s22", "mark.twain"); } public void test_stringlib_chop_expect_error() { //test: arg is null try { doCompile("string test;function integer transform() {test = chop(null);return 0;}","test_strlib_chop_erxpect_error"); fail(); } catch (Exception e) 
{
        // do nothing
    }
    //test: regexp pattern is null
    try {
        doCompile("string test;function integer transform() {test = chop('aaa', null);return 0;}","test_strlib_chop_erxpect_error");
        fail();
    } catch (Exception e) {
        // do nothing
    }
    //test: regexp pattern is null - test 2
    try {
        doCompile("string test;function integer transform() {test = chop('', null);return 0;}","test_strlib_chop_erxpect_error");
        fail();
    } catch (Exception e) {
        // do nothing
    }
    //test: arg is null
    try {
        doCompile("string test;function integer transform() {test = chop(null, 'aaa');return 0;}","test_strlib_chop_erxpect_error");
        fail();
    } catch (Exception e) {
        // do nothing
    }
    //test: arg is null - test2
    try {
        doCompile("string test;function integer transform() {test = chop(null, '');return 0;}","test_strlib_chop_erxpect_error");
        fail();
    } catch (Exception e) {
        // do nothing
    }
    //test: arg is null - test3
    try {
        doCompile("string test;function integer transform() {test = chop(null, null);return 0;}","test_strlib_chop_erxpect_error");
        fail();
    } catch (Exception e) {
        // do nothing
    }
}

public void test_bitwise_or() {
    doCompile("test_bitwise_or");
    check("resultInt1", 1);
    check("resultInt2", 1);
    check("resultInt3", 3);
    check("resultInt4", 3);
    check("resultLong1", 1l);
    check("resultLong2", 1l);
    check("resultLong3", 3l);
    check("resultLong4", 3l);
}

public void test_bitwise_and() {
    doCompile("test_bitwise_and");
    check("resultInt1", 0);
    check("resultInt2", 1);
    check("resultInt3", 0);
    check("resultInt4", 1);
    check("resultLong1", 0l);
    check("resultLong2", 1l);
    check("resultLong3", 0l);
    check("resultLong4", 1l);
}

public void test_bitwise_xor() {
    doCompile("test_bitwise_xor");
    check("resultInt1", 1);
    check("resultInt2", 0);
    check("resultInt3", 3);
    check("resultInt4", 2);
    check("resultLong1", 1l);
    check("resultLong2", 0l);
    check("resultLong3", 3l);
    check("resultLong4", 2l);
}

public void test_bitwise_lshift() {
    doCompile("test_bitwise_lshift");
    check("resultInt1", 2);
    check("resultInt2", 4);
    check("resultInt3", 10);
    check("resultInt4", 20);
    check("resultLong1", 2l);
    check("resultLong2", 4l);
    check("resultLong3", 10l);
    check("resultLong4", 20l);
}

public void test_bitwise_rshift() {
    doCompile("test_bitwise_rshift");
    check("resultInt1", 2);
    check("resultInt2", 0);
    check("resultInt3", 4);
    check("resultInt4", 2);
    check("resultLong1", 2l);
    check("resultLong2", 0l);
    check("resultLong3", 4l);
    check("resultLong4", 2l);
}

public void test_bitwise_negate() {
    doCompile("test_bitwise_negate");
    check("resultInt", -59081717);
    check("resultLong", -3321654987654105969L);
}

public void test_set_bit() {
    doCompile("test_set_bit");
    check("resultInt1", 0x2FF);
    check("resultInt2", 0xFB);
    check("resultLong1", 0x4000000000000l);
    check("resultLong2", 0xFFDFFFFFFFFFFFFl);
    check("resultBool1", true);
    check("resultBool2", false);
    check("resultBool3", true);
    check("resultBool4", false);
}

// NOTE(review): new BigDecimal(5.0) uses the double constructor; here 5.0 is
// exactly representable so the value is exact -- changing it to the String
// constructor could alter scale and break equals()-based comparison.
public void test_mathlib_abs() {
    doCompile("test_mathlib_abs");
    check("absIntegerPlus", new Integer(10));
    check("absIntegerMinus", new Integer(1));
    check("absLongPlus", new Long(10));
    check("absLongMinus", new Long(1));
    check("absDoublePlus", new Double(10.0));
    check("absDoubleMinus", new Double(1.0));
    check("absDecimalPlus", new BigDecimal(5.0));
    check("absDecimalMinus", new BigDecimal(5.0));
}

public void test_mathlib_ceil() {
    doCompile("test_mathlib_ceil");
    check("ceil1", -3.0);
    check("intResult", Arrays.asList(2.0, 3.0));
    check("longResult", Arrays.asList(2.0, 3.0));
    check("doubleResult", Arrays.asList(3.0, -3.0));
    check("decimalResult", Arrays.asList(3.0, -3.0));
}

public void test_mathlib_e() {
    doCompile("test_mathlib_e");
    check("varE", Math.E);
}

public void test_mathlib_exp() {
    doCompile("test_mathlib_exp");
    check("ex", Math.exp(1.123));
}

public void test_mathlib_floor() {
    doCompile("test_mathlib_floor");
    check("floor1", -4.0);
    check("intResult", Arrays.asList(2.0, 3.0));
    check("longResult", Arrays.asList(2.0, 3.0));
    check("doubleResult", Arrays.asList(2.0, -4.0));
    check("decimalResult", Arrays.asList(2.0,
-4.0));
}

public void test_mathlib_log() {
    doCompile("test_mathlib_log");
    check("ln", Math.log(3));
}

public void test_mathlib_log10() {
    doCompile("test_mathlib_log10");
    check("varLog10", Math.log10(3));
}

public void test_mathlib_pi() {
    doCompile("test_mathlib_pi");
    check("varPi", Math.PI);
}

public void test_mathlib_pow() {
    doCompile("test_mathlib_pow");
    check("power1", Math.pow(3,1.2));
    check("power2", Double.NaN);
    check("intResult", Arrays.asList(8d, 8d, 8d, 8d));
    check("longResult", Arrays.asList(8d, 8d, 8d, 8d));
    check("doubleResult", Arrays.asList(8d, 8d, 8d, 8d));
    check("decimalResult", Arrays.asList(8d, 8d, 8d, 8d));
}

public void test_mathlib_round() {
    doCompile("test_mathlib_round");
    check("round1", -4l);
    check("intResult", Arrays.asList(2l, 3l));
    check("longResult", Arrays.asList(2l, 3l));
    check("doubleResult", Arrays.asList(2l, 4l));
    check("decimalResult", Arrays.asList(2l, 4l));
}

public void test_mathlib_sqrt() {
    doCompile("test_mathlib_sqrt");
    check("sqrtPi", Math.sqrt(Math.PI));
    check("sqrt9", Math.sqrt(9));
}

public void test_datelib_cache() {
    doCompile("test_datelib_cache");
    check("b11", true);
    check("b12", true);
    check("b21", true);
    check("b22", true);
    check("b31", true);
    check("b32", true);
    check("b41", true);
    check("b42", true);
    checkEquals("date3", "date3d");
    checkEquals("date4", "date4d");
    checkEquals("date7", "date7d");
    checkEquals("date8", "date8d");
}

public void test_datelib_trunc() {
    doCompile("test_datelib_trunc");
    // NOTE(review): 00 and 02 are octal literals (values 0 and 2) - harmless here
    check("truncDate", new GregorianCalendar(2004, 00, 02).getTime());
}

// truncDate() is expected to keep only the time-of-day portion of BORN_VALUE.
public void test_datelib_truncDate() {
    doCompile("test_datelib_truncDate");
    Calendar cal = Calendar.getInstance();
    cal.setTime(BORN_VALUE);
    int[] portion = new int[]{cal.get(Calendar.HOUR_OF_DAY), cal.get(Calendar.MINUTE), cal.get(Calendar.SECOND),cal.get(Calendar.MILLISECOND)};
    cal.clear();
    cal.set(Calendar.HOUR_OF_DAY, portion[0]);
    cal.set(Calendar.MINUTE, portion[1]);
    cal.set(Calendar.SECOND, portion[2]);
    cal.set(Calendar.MILLISECOND, portion[3]);
    check("truncBornDate", cal.getTime());
}

public void test_datelib_today() {
    doCompile("test_datelib_today");
    Date expectedDate = new Date();
    //the returned date does not need to be exactly the same date which is in expectedData variable
    //let say 1000ms is tolerance for equality
    assertTrue("todayDate", Math.abs(expectedDate.getTime() - ((Date) getVariable("todayDate")).getTime()) < 1000);
}

public void test_datelib_zeroDate() {
    doCompile("test_datelib_zeroDate");
    check("zeroDate", new Date(0));
}

public void test_datelib_dateDiff() {
    doCompile("test_datelib_dateDiff");
    long diffYears = Years.yearsBetween(new DateTime(), new DateTime(BORN_VALUE)).getYears();
    check("ddiff", diffYears);
    long[] results = {1, 12, 52, 365, 8760, 525600, 31536000, 31536000000L};
    String[] vars = {"ddiffYears", "ddiffMonths", "ddiffWeeks", "ddiffDays", "ddiffHours", "ddiffMinutes", "ddiffSeconds", "ddiffMilliseconds"};
    for (int i = 0; i < results.length; i++) {
        check(vars[i], results[i]);
    }
}

public void test_datelib_dateAdd() {
    doCompile("test_datelib_dateAdd");
    check("datum", new Date(BORN_MILLISEC_VALUE + 100));
}

// extractTime() keeps only the time-of-day portion; the original date must
// stay untouched.
public void test_datelib_extractTime() {
    doCompile("test_datelib_extractTime");
    Calendar cal = Calendar.getInstance();
    cal.setTime(BORN_VALUE);
    int[] portion = new int[]{cal.get(Calendar.HOUR_OF_DAY), cal.get(Calendar.MINUTE), cal.get(Calendar.SECOND),cal.get(Calendar.MILLISECOND)};
    cal.clear();
    cal.set(Calendar.HOUR_OF_DAY, portion[0]);
    cal.set(Calendar.MINUTE, portion[1]);
    cal.set(Calendar.SECOND, portion[2]);
    cal.set(Calendar.MILLISECOND, portion[3]);
    check("bornExtractTime", cal.getTime());
    check("originalDate", BORN_VALUE);
}

// extractDate() keeps only the day/month/year portion; the original date must
// stay untouched.
public void test_datelib_extractDate() {
    doCompile("test_datelib_extractDate");
    Calendar cal = Calendar.getInstance();
    cal.setTime(BORN_VALUE);
    int[] portion = new int[]{cal.get(Calendar.DAY_OF_MONTH), cal.get(Calendar.MONTH), cal.get(Calendar.YEAR)};
    cal.clear();
    cal.set(Calendar.DAY_OF_MONTH, portion[0]);
    cal.set(Calendar.MONTH, portion[1]);
cal.set(Calendar.YEAR, portion[2]);
    check("bornExtractDate", cal.getTime());
    check("originalDate", BORN_VALUE);
}

// createDate() checked three ways: default zone, explicit GMT+5 literal, and
// arguments passed through variables.
public void test_datelib_createDate() {
    doCompile("test_datelib_createDate");
    Calendar cal = Calendar.getInstance();
    // no time zone
    cal.clear();
    cal.set(2013, 5, 11);
    check("date1", cal.getTime());
    cal.clear();
    cal.set(2013, 5, 11, 14, 27, 53);
    check("dateTime1", cal.getTime());
    cal.clear();
    cal.set(2013, 5, 11, 14, 27, 53);
    cal.set(Calendar.MILLISECOND, 123);
    check("dateTimeMillis1", cal.getTime());
    // literal
    cal.setTimeZone(TimeZone.getTimeZone("GMT+5"));
    cal.clear();
    cal.set(2013, 5, 11);
    check("date2", cal.getTime());
    cal.clear();
    cal.set(2013, 5, 11, 14, 27, 53);
    check("dateTime2", cal.getTime());
    cal.clear();
    cal.set(2013, 5, 11, 14, 27, 53);
    cal.set(Calendar.MILLISECOND, 123);
    check("dateTimeMillis2", cal.getTime());
    // variable
    cal.clear();
    cal.set(2013, 5, 11);
    check("date3", cal.getTime());
    cal.clear();
    cal.set(2013, 5, 11, 14, 27, 53);
    check("dateTime3", cal.getTime());
    cal.clear();
    cal.set(2013, 5, 11, 14, 27, 53);
    cal.set(Calendar.MILLISECOND, 123);
    check("dateTimeMillis3", cal.getTime());
}

public void test_datelib_getPart() {
    doCompile("test_datelib_getPart");
    Calendar cal = Calendar.getInstance();
    cal.clear();
    cal.setTimeZone(TimeZone.getTimeZone("GMT+1"));
    cal.set(2013, 5, 11, 14, 46, 34);
    cal.set(Calendar.MILLISECOND, 123);
    Date date = cal.getTime();
    cal = Calendar.getInstance();
    cal.setTime(date);
    // no time zone
    check("year1", cal.get(Calendar.YEAR));
    check("month1", cal.get(Calendar.MONTH) + 1);
    check("day1", cal.get(Calendar.DAY_OF_MONTH));
    check("hour1", cal.get(Calendar.HOUR_OF_DAY));
    check("minute1", cal.get(Calendar.MINUTE));
    check("second1", cal.get(Calendar.SECOND));
    check("millisecond1", cal.get(Calendar.MILLISECOND));
    cal.setTimeZone(TimeZone.getTimeZone("GMT+5"));
    // literal
    check("year2", cal.get(Calendar.YEAR));
    check("month2", cal.get(Calendar.MONTH) + 1);
    check("day2", cal.get(Calendar.DAY_OF_MONTH));
    check("hour2", cal.get(Calendar.HOUR_OF_DAY));
    check("minute2", cal.get(Calendar.MINUTE));
    check("second2", cal.get(Calendar.SECOND));
    check("millisecond2", cal.get(Calendar.MILLISECOND));
    // variable
    check("year3", cal.get(Calendar.YEAR));
    check("month3", cal.get(Calendar.MONTH) + 1);
    check("day3", cal.get(Calendar.DAY_OF_MONTH));
    check("hour3", cal.get(Calendar.HOUR_OF_DAY));
    check("minute3", cal.get(Calendar.MINUTE));
    check("second3", cal.get(Calendar.SECOND));
    check("millisecond3", cal.get(Calendar.MILLISECOND));
}

public void test_convertlib_cache() {
    // set default locale to en.US so the date is formatted uniformly on all systems
    Locale.setDefault(Locale.US);
    doCompile("test_convertlib_cache");
    Calendar cal = Calendar.getInstance();
    cal.set(2000, 6, 20, 0, 0, 0);
    cal.set(Calendar.MILLISECOND, 0);
    Date checkDate = cal.getTime();
    final SimpleDateFormat format = new SimpleDateFormat();
    format.applyPattern("yyyy MMM dd");
    check("sdate1", format.format(new Date()));
    check("sdate2", format.format(new Date()));
    check("date01", checkDate);
    check("date02", checkDate);
    check("date03", checkDate);
    check("date04", checkDate);
    check("date11", checkDate);
    check("date12", checkDate);
    check("date13", checkDate);
}

public void test_convertlib_base64byte() {
    doCompile("test_convertlib_base64byte");
    assertTrue(Arrays.equals((byte[])getVariable("base64input"), Base64.decode("The quick brown fox jumps over the lazy dog")));
}

public void test_convertlib_base64byte_expect_error(){
    //this test should be expected to success in future
    try {
        doCompile("function integer transform(){byte b = base64byte(null); return 0;}","test_convertlib_base64byte_expect_error");
        fail();
    } catch (Exception e) {
        // do nothing
    }
}

public void test_convertlib_bits2str() {
    doCompile("test_convertlib_bits2str");
    check("bitsAsString1", "00000000");
    check("bitsAsString2", "11111111");
    check("bitsAsString3", "010100000100110110100000");
}

public void test_convertlib_bits2str_expect_error(){
    //this test should be expected to success in future
    try {
        doCompile("function integer transform(){string s = bits2str(null); return 0;}","test_convertlib_bits2str_expect_error");
        fail();
    } catch (Exception e) {
        // do nothing
    }
}

public void test_convertlib_bool2num() {
    doCompile("test_convertlib_bool2num");
    check("resultTrue", 1);
    check("resultFalse", 0);
}

// Body commented out pending CLO-1255; the commented code is missing its
// trailing brace inside the comment - kept verbatim.
public void test_convertlib_bool2num_expect_error(){
    // CLO-1255
    // //this test should be expected to success in future
    // try {
    // doCompile("function integer transform(){integer s = bool2num(null);return 0;}","test_convertlib_bool2num_expect_error");
    // fail();
    // } catch (Exception e) {
    // // do nothing
}

public void test_convertlib_byte2base64() {
    doCompile("test_convertlib_byte2base64");
    check("inputBase64", Base64.encodeBytes("Abeceda zedla deda".getBytes()));
}

public void test_convertlib_byte2base64_expect_error(){
    //this test should be expected to success in future
    try {
        doCompile("function integer transform(){string s = byte2base64(null);return 0;}","test_convertlib_byte2base64_expect_error");
        fail();
    } catch (Exception e) {
        // do nothing
    }
}

public void test_convertlib_byte2hex() {
    doCompile("test_convertlib_byte2hex");
    check("hexResult", "41626563656461207a65646c612064656461");
    check("test_null", null);
}

public void test_convertlib_date2long() {
    doCompile("test_convertlib_date2long");
    check("bornDate", BORN_MILLISEC_VALUE);
    check("zeroDate", 0l);
}

public void test_convertlib_date2long_expect_error(){
    //this test should be expected to success in future
    try {
        doCompile("function integer transform(){long l = date2long(null);return 0;}","test_convertlib_date2long_expect_error");
        fail();
    } catch (Exception e) {
        // do nothing
    }
}

public void test_convertlib_date2num() {
    doCompile("test_convertlib_date2num");
    Calendar cal = Calendar.getInstance();
    cal.setTime(BORN_VALUE);
    check("yearDate", 1987);
    check("monthDate", 5);
    check("secondDate", 0);
    check("yearBorn", cal.get(Calendar.YEAR));
    check("monthBorn", cal.get(Calendar.MONTH) + 1);
//Calendar enumerates months from 0, not 1;
    check("secondBorn", cal.get(Calendar.SECOND));
    check("yearMin", 1970);
    check("monthMin", 1);
    check("weekMin", 1);
    check("weekMinCs", 1);
    check("dayMin", 1);
    check("hourMin", 1);
    //TODO: check!
    check("minuteMin", 0);
    check("secondMin", 0);
    check("millisecMin", 0);
}

public void test_convertlib_date2num_expect_error(){
    //this test should be expected to success in future
    try {
        doCompile("function integer transform(){number num = date2num(null,null); return 0;}","test_convertlib_date2num_expect_error");
        fail();
    } catch (Exception e) {
        // do nothing;
    }
    //this test should be expected to success in future
    try {
        doCompile("function integer transform(){number num = date2num(1982-09-02,null); return 0;}","test_convertlib_date2num_expect_error");
        fail();
    } catch (Exception e) {
        // do nothing;
    }
    //this test should be expected to success in future
    try {
        doCompile("function integer transform(){number num = date2num(null,year); return 0;}","test_convertlib_date2num_expect_error");
        fail();
    } catch (Exception e) {
        // do nothing;
    }
}

// date2str() with explicit patterns, locales (cs.CZ, en, pl) and a GMT+8
// time zone; the loop variant exercises locale caching.
public void test_convertlib_date2str() {
    doCompile("test_convertlib_date2str");
    check("inputDate", "1987:05:12");
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy:MM:dd");
    check("bornDate", sdf.format(BORN_VALUE));
    SimpleDateFormat sdfCZ = new SimpleDateFormat("yyyy:MMMM:dd", MiscUtils.createLocale("cs.CZ"));
    check("czechBornDate", sdfCZ.format(BORN_VALUE));
    SimpleDateFormat sdfEN = new SimpleDateFormat("yyyy:MMMM:dd", MiscUtils.createLocale("en"));
    check("englishBornDate", sdfEN.format(BORN_VALUE));
    {
        String[] locales = {"en", "pl", null, "cs.CZ", null};
        List<String> expectedDates = new ArrayList<String>();
        for (String locale: locales) {
            expectedDates.add(new SimpleDateFormat("yyyy:MMMM:dd", MiscUtils.createLocale(locale)).format(BORN_VALUE));
        }
        check("loopTest", expectedDates);
    }
    SimpleDateFormat sdfGMT8 = new SimpleDateFormat("yyyy:MMMM:dd z", MiscUtils.createLocale("en"));
    sdfGMT8.setTimeZone(TimeZone.getTimeZone("GMT+8"));
    check("timeZone", sdfGMT8.format(BORN_VALUE));
}

public void test_convertlib_date2str_expect_error(){
    //this test should be expected to success in future
    try {
        doCompile("function integer transform(){string s = date2str(null,'yyyy:MMMM:dd', 'cs.CZ', 'GMT+8');return 0;}","test_convertlib_date2str_expect_error");
        fail();
    } catch (Exception e) {
        // do nothing
    }
    try {
        doCompile("function integer transform(){string s = date2str(1985-11-12,null, 'cs.CZ', 'GMT+8');return 0;}","test_convertlib_date2str_expect_error");
        fail();
    } catch (Exception e) {
        // do nothing
    }
}

public void test_convertlib_decimal2double() {
    doCompile("test_convertlib_decimal2double");
    check("toDouble", 0.007d);
}

// NOTE(review): method name says "except_error" (typo for "expect") - kept,
// as renaming would change the externally visible test name.
public void test_convertlib_decimal2double_except_error(){
    //this test should be expected to success in future
    try {
        doCompile("function integer transform(){double d = decimal2double(null); return 0;}","test_convertlib_decimal2double_expect_error");
        fail();
    } catch (Exception e) {
        // do nothing
    }
}

public void test_convertlib_decimal2integer() {
    doCompile("test_convertlib_decimal2integer");
    check("toInteger", 0);
    check("toInteger2", -500);
    check("toInteger3", 1000000);
}

public void test_convertlib_decimal2integer_expect_error(){
    //this test should be expected to success in future
    try {
        doCompile("function integer transform(){integer i = decimal2integer(null); return 0;}","test_convertlib_decimal2integer_expect_error");
        fail();
    } catch (Exception e) {
        // do nothing
    }
}

public void test_convertlib_decimal2long() {
    doCompile("test_convertlib_decimal2long");
    check("toLong", 0l);
    check("toLong2", -500l);
    check("toLong3", 10000000000l);
}

public void test_convertlib_decimal2long_expect_error(){
    //this test should be expected to success in future
    try {
        doCompile("function integer transform(){long i = decimal2long(null); return 0;}","test_convertlib_decimal2long_expect_error");
        fail();
    } catch (Exception e) {
        // do nothing
    }
}

public void
test_convertlib_double2integer() {
    doCompile("test_convertlib_double2integer");
    check("toInteger", 0);
    check("toInteger2", -500);
    check("toInteger3", 1000000);
}

// NOTE(review): fixture id "test_convertlib_doublel2integer_expect_error"
// contains a typo ("doublel"); it is a runtime identifier, kept as-is.
public void test_convertlib_double2integer_expect_error(){
    //this test should be expected to success in future
    try {
        doCompile("function integer transform(){integer i = double2integer(null); return 0;}","test_convertlib_doublel2integer_expect_error");
        fail();
    } catch (Exception e) {
        // do nothing
    }
}

public void test_convertlib_double2long() {
    doCompile("test_convertlib_double2long");
    check("toLong", 0l);
    check("toLong2", -500l);
    check("toLong3", 10000000000l);
}

public void test_convertlib_double2long_expect_error(){
    //this test should be expected to success in future
    try {
        doCompile("function integer transform(){long l = double2long(null); return 0;}","test_convertlib_doublel2long_expect_error");
        fail();
    } catch (Exception e) {
        // do nothing
    }
}

public void test_convertlib_getFieldName() {
    doCompile("test_convertlib_getFieldName");
    check("fieldNames",Arrays.asList("Name", "Age", "City", "Born", "BornMillisec", "Value", "Flag", "ByteArray", "Currency"));
}

public void test_convertlib_getFieldType() {
    doCompile("test_convertlib_getFieldType");
    check("fieldTypes",Arrays.asList(DataFieldType.STRING.getName(), DataFieldType.NUMBER.getName(), DataFieldType.STRING.getName(),
        DataFieldType.DATE.getName(), DataFieldType.LONG.getName(), DataFieldType.INTEGER.getName(), DataFieldType.BOOLEAN.getName(),
        DataFieldType.BYTE.getName(), DataFieldType.DECIMAL.getName()));
}

public void test_convertlib_hex2byte() {
    doCompile("test_convertlib_hex2byte");
    assertTrue(Arrays.equals((byte[])getVariable("fromHex"), BYTEARRAY_VALUE));
    check("test_null", null);
}

public void test_convertlib_long2date() {
    doCompile("test_convertlib_long2date");
    check("fromLong1", new Date(0));
    check("fromLong2", new Date(50000000000L));
    check("fromLong3", new Date(-5000L));
}

public void test_convertlib_long2date_expect_error(){
    //this test should be expected to success in future
    try {
        doCompile("function integer transform(){date d = long2date(null); return 0;}","test_convertlib_long2date_expect_error");
        fail();
    } catch (Exception e) {
        // do nothing
    }
}

public void test_convertlib_long2integer() {
    doCompile("test_convertlib_long2integer");
    check("fromLong1", 10);
    check("fromLong2", -10);
}

public void test_convertlib_long2integer_expect_error(){
    //this test should be expected to success in future
    try {
        doCompile("function integer transform(){integer i = long2integer(null); return 0;}","test_convertlib_long2integer_expect_error");
        fail();
    } catch (Exception e) {
        // do nothing
    }
}

public void test_convertlib_long2packDecimal() {
    doCompile("test_convertlib_long2packDecimal");
    assertTrue(Arrays.equals((byte[])getVariable("packedLong"), new byte[] {5, 0, 12}));
}

public void test_convertlib_long2packDecimal_expect_error(){
    //this test should be expected to success in future
    try {
        doCompile("function integer transform(){byte b = long2packDecimal(null); return 0;}","test_convertlib_long2packDecimal_expect_error");
        fail();
    } catch (Exception e) {
        // do nothing
    }
}

public void test_convertlib_md5() {
    doCompile("test_convertlib_md5");
    assertTrue(Arrays.equals((byte[])getVariable("md5Hash1"), Digest.digest(DigestType.MD5, "The quick brown fox jumps over the lazy dog")));
    assertTrue(Arrays.equals((byte[])getVariable("md5Hash2"), Digest.digest(DigestType.MD5, BYTEARRAY_VALUE)));
    assertTrue(Arrays.equals((byte[])getVariable("test_empty"), Digest.digest(DigestType.MD5, "")));
}

// Body commented out pending CLO-1254 (kept verbatim, including the missing
// trailing brace inside the comment).
public void test_convertlib_md5_expect_error(){
    //CLO-1254
    // //this test should be expected to success in future
    // try {
    // doCompile("function integer transform(){byte b = md5(null); return 0;}","test_convertlib_md5_expect_error");
    // fail();
    // } catch (Exception e) {
    // // do nothing
}

public void test_convertlib_num2bool() {
    doCompile("test_convertlib_num2bool");
    check("integerTrue", true);
    check("integerFalse", false);
    check("longTrue",
true); check("longFalse", false); check("doubleTrue", true); check("doubleFalse", false); check("decimalTrue", true); check("decimalFalse", false); } public void test_convertlib_num2bool_expect_error(){ //this test should be expected to success in future //test: integer try { doCompile("integer input; function integer transform(){input=null; boolean b = num2bool(input); return 0;}","test_convertlib_num2bool_expect_error"); fail(); } catch (Exception e) { // do nothing } //this test should be expected to success in future //test: long try { doCompile("long input; function integer transform(){input=null; boolean b = num2bool(input); return 0;}","test_convertlib_num2bool_expect_error"); fail(); } catch (Exception e) { // do nothing } //this test should be expected to success in future //test: double try { doCompile("double input; function integer transform(){input=null; boolean b = num2bool(input); return 0;}","test_convertlib_num2bool_expect_error"); fail(); } catch (Exception e) { // do nothing } //this test should be expected to success in future //test: decimal try { doCompile("decimal input; function integer transform(){input=null; boolean b = num2bool(input); return 0;}","test_convertlib_num2bool_expect_error"); fail(); } catch (Exception e) { // do nothing } } public void test_convertlib_num2str() { System.out.println("num2str() test:"); doCompile("test_convertlib_num2str"); check("intOutput", Arrays.asList("16", "10000", "20", "10", "1.235E3", "12 350 001 Kcs")); check("longOutput", Arrays.asList("16", "10000", "20", "10", "1.235E13", "12 350 001 Kcs")); check("doubleOutput", Arrays.asList("16.16", "0x1.028f5c28f5c29p4", "1.23548E3", "12 350 001,1 Kcs")); check("decimalOutput", Arrays.asList("16.16", "1235.44", "12 350 001,1 Kcs")); check("test_null_dec", "NaN"); } public void test_converlib_num2str_expect_error(){ //this test should be expected to success in future //test: integer try { doCompile("integer input; function integer transform(){input=null; string 
str = num2str(input); return 0;}","test_converlib_num2str_expect_error"); fail(); } catch (Exception e) { // do nothing } //this test should be expected to success in future //test: long try { doCompile("long input; function integer transform(){input=null; string str = num2str(input); return 0;}","test_converlib_num2str_expect_error"); fail(); } catch (Exception e) { // do nothing } //this test should be expected to success in future //test: double try { doCompile("double input; function integer transform(){input=null; string str = num2str(input); return 0;}","test_converlib_num2str_expect_error"); fail(); } catch (Exception e) { // do nothing } // //this test should be expected to success in future // //test: decimal // try { // doCompile("decimal input; function integer transform(){input=null; string str = num2str(input); return 0;}","test_converlib_num2str_expect_error"); // fail(); // } catch (Exception e) { // // do nothing } public void test_convertlib_packdecimal2long() { doCompile("test_convertlib_packDecimal2long"); check("unpackedLong", PackedDecimal.parse(BYTEARRAY_VALUE)); } public void test_convertlib_sha() { doCompile("test_convertlib_sha"); assertTrue(Arrays.equals((byte[])getVariable("shaHash1"), Digest.digest(DigestType.SHA, "The quick brown fox jumps over the lazy dog"))); assertTrue(Arrays.equals((byte[])getVariable("shaHash2"), Digest.digest(DigestType.SHA, BYTEARRAY_VALUE))); } public void test_convertlib_sha256() { doCompile("test_convertlib_sha256"); assertTrue(Arrays.equals((byte[])getVariable("shaHash1"), Digest.digest(DigestType.SHA256, "The quick brown fox jumps over the lazy dog"))); assertTrue(Arrays.equals((byte[])getVariable("shaHash2"), Digest.digest(DigestType.SHA256, BYTEARRAY_VALUE))); } public void test_convertlib_str2bits() { doCompile("test_convertlib_str2bits"); //TODO: uncomment -> test will pass, but is that correct? 
assertTrue(Arrays.equals((byte[]) getVariable("textAsBits1"), new byte[] {0/*, 0, 0, 0, 0, 0, 0, 0*/})); assertTrue(Arrays.equals((byte[]) getVariable("textAsBits2"), new byte[] {-1/*, 0, 0, 0, 0, 0, 0, 0*/})); assertTrue(Arrays.equals((byte[]) getVariable("textAsBits3"), new byte[] {10, -78, 5/*, 0, 0, 0, 0, 0*/})); } public void test_convertlib_str2bool() { doCompile("test_convertlib_str2bool"); check("fromTrueString", true); check("fromFalseString", false); } public void test_convertlib_str2date() { doCompile("test_convertlib_str2date"); Calendar cal = Calendar.getInstance(); cal.set(2050, 4, 19, 0, 0, 0); cal.set(Calendar.MILLISECOND, 0); Date checkDate = cal.getTime(); check("date1", checkDate); check("date2", checkDate); cal.clear(); cal.setTimeZone(TimeZone.getTimeZone("GMT+8")); cal.set(2013, 04, 30, 17, 15, 12); check("withTimeZone1", cal.getTime()); cal.clear(); cal.setTimeZone(TimeZone.getTimeZone("GMT-8")); cal.set(2013, 04, 30, 17, 15, 12); check("withTimeZone2", cal.getTime()); assertFalse(getVariable("withTimeZone1").equals(getVariable("withTimeZone2"))); } public void test_convertlib_str2decimal() { doCompile("test_convertlib_str2decimal"); check("parsedDecimal1", new BigDecimal("100.13")); check("parsedDecimal2", new BigDecimal("123123123.123")); check("parsedDecimal3", new BigDecimal("-350000.01")); check("parsedDecimal4", new BigDecimal("1000000")); check("parsedDecimal5", new BigDecimal("1000000.99")); check("parsedDecimal6", new BigDecimal("123123123.123")); } public void test_convertlib_str2double() { doCompile("test_convertlib_str2double"); check("parsedDouble1", 100.13); check("parsedDouble2", 123123123.123); check("parsedDouble3", -350000.01); } public void test_convertlib_str2integer() { doCompile("test_convertlib_str2integer"); check("parsedInteger1", 123456789); check("parsedInteger2", 123123); check("parsedInteger3", -350000); check("parsedInteger4", 419); } public void test_convertlib_str2long() { doCompile("test_convertlib_str2long"); 
check("parsedLong1", 1234567890123L); check("parsedLong2", 123123123456789L); check("parsedLong3", -350000L); check("parsedLong4", 133L); } public void test_convertlib_toString() { doCompile("test_convertlib_toString"); check("integerString", "10"); check("longString", "110654321874"); check("doubleString", "1.547874E-14"); check("decimalString", "-6847521431.1545874"); check("listString", "[not ALI A, not ALI B, not ALI D..., but, ALI H!]"); check("mapString", "{1=Testing, 2=makes, 3=me, 4=crazy :-)}"); String byteMapString = getVariable("byteMapString").toString(); assertTrue(byteMapString.contains("1=value1")); assertTrue(byteMapString.contains("2=value2")); String fieldByteMapString = getVariable("fieldByteMapString").toString(); assertTrue(fieldByteMapString.contains("key1=value1")); assertTrue(fieldByteMapString.contains("key2=value2")); check("byteListString", "[firstElement, secondElement]"); check("fieldByteListString", "[firstElement, secondElement]"); } public void test_convertlib_str2byte() { doCompile("test_convertlib_str2byte"); checkArray("utf8Hello", new byte[] { 72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33 }); checkArray("utf8Horse", new byte[] { 80, -59, -103, -61, -83, 108, 105, -59, -95, 32, -59, -66, 108, 117, -59, -91, 111, 117, -60, -115, 107, -61, -67, 32, 107, -59, -81, -59, -120, 32, 112, -60, -101, 108, 32, -60, -113, -61, -95, 98, 108, 115, 107, -61, -87, 32, -61, -77, 100, 121 }); checkArray("utf8Math", new byte[] { -62, -67, 32, -30, -123, -109, 32, -62, -68, 32, -30, -123, -107, 32, -30, -123, -103, 32, -30, -123, -101, 32, -30, -123, -108, 32, -30, -123, -106, 32, -62, -66, 32, -30, -123, -105, 32, -30, -123, -100, 32, -30, -123, -104, 32, -30, -126, -84, 32, -62, -78, 32, -62, -77, 32, -30, -128, -96, 32, -61, -105, 32, -30, -122, -112, 32, -30, -122, -110, 32, -30, -122, -108, 32, -30, -121, -110, 32, -30, -128, -90, 32, -30, -128, -80, 32, -50, -111, 32, -50, -110, 32, -30, -128, -109, 32, -50, -109, 32, -50, -108, 32, 
-30, -126, -84, 32, -50, -107, 32, -50, -106, 32, -49, -128, 32, -49, -127, 32, -49, -126, 32, -49, -125, 32, -49, -124, 32, -49, -123, 32, -49, -122, 32, -49, -121, 32, -49, -120, 32, -49, -119 }); checkArray("utf16Hello", new byte[] { -2, -1, 0, 72, 0, 101, 0, 108, 0, 108, 0, 111, 0, 32, 0, 87, 0, 111, 0, 114, 0, 108, 0, 100, 0, 33 }); checkArray("utf16Horse", new byte[] { -2, -1, 0, 80, 1, 89, 0, -19, 0, 108, 0, 105, 1, 97, 0, 32, 1, 126, 0, 108, 0, 117, 1, 101, 0, 111, 0, 117, 1, 13, 0, 107, 0, -3, 0, 32, 0, 107, 1, 111, 1, 72, 0, 32, 0, 112, 1, 27, 0, 108, 0, 32, 1, 15, 0, -31, 0, 98, 0, 108, 0, 115, 0, 107, 0, -23, 0, 32, 0, -13, 0, 100, 0, 121 }); checkArray("utf16Math", new byte[] { -2, -1, 0, -67, 0, 32, 33, 83, 0, 32, 0, -68, 0, 32, 33, 85, 0, 32, 33, 89, 0, 32, 33, 91, 0, 32, 33, 84, 0, 32, 33, 86, 0, 32, 0, -66, 0, 32, 33, 87, 0, 32, 33, 92, 0, 32, 33, 88, 0, 32, 32, -84, 0, 32, 0, -78, 0, 32, 0, -77, 0, 32, 32, 32, 0, 32, 0, -41, 0, 32, 33, -112, 0, 32, 33, -110, 0, 32, 33, -108, 0, 32, 33, -46, 0, 32, 32, 38, 0, 32, 32, 48, 0, 32, 3, -111, 0, 32, 3, -110, 0, 32, 32, 19, 0, 32, 3, -109, 0, 32, 3, -108, 0, 32, 32, -84, 0, 32, 3, -107, 0, 32, 3, -106, 0, 32, 3, -64, 0, 32, 3, -63, 0, 32, 3, -62, 0, 32, 3, -61, 0, 32, 3, -60, 0, 32, 3, -59, 0, 32, 3, -58, 0, 32, 3, -57, 0, 32, 3, -56, 0, 32, 3, -55 }); checkArray("macHello", new byte[] { 72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33 }); checkArray("macHorse", new byte[] { 80, -34, -110, 108, 105, -28, 32, -20, 108, 117, -23, 111, 117, -117, 107, -7, 32, 107, -13, -53, 32, 112, -98, 108, 32, -109, -121, 98, 108, 115, 107, -114, 32, -105, 100, 121 }); checkArray("asciiHello", new byte[] { 72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33 }); checkArray("isoHello", new byte[] { 72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33 }); checkArray("isoHorse", new byte[] { 80, -8, -19, 108, 105, -71, 32, -66, 108, 117, -69, 111, 117, -24, 107, -3, 32, 107, -7, -14, 32, 112, -20, 108, 32, -17, 
-31, 98, 108, 115, 107, -23, 32, -13, 100, 121 }); checkArray("cpHello", new byte[] { 72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33 }); checkArray("cpHorse", new byte[] { 80, -8, -19, 108, 105, -102, 32, -98, 108, 117, -99, 111, 117, -24, 107, -3, 32, 107, -7, -14, 32, 112, -20, 108, 32, -17, -31, 98, 108, 115, 107, -23, 32, -13, 100, 121 }); } public void test_convertlib_byte2str() { doCompile("test_convertlib_byte2str"); String hello = "Hello World!"; String horse = "Příliš žluťoučký kůň pěl ďáblské ódy"; String math = "½ ⅓ ¼ ⅕ ⅙ ⅛ ⅔ ⅖ ¾ ⅗ ⅜ ⅘ € ² ³ † × ← → ↔ ⇒ … ‰ Α Β – Γ Δ € Ε Ζ π ρ ς σ τ υ φ χ ψ ω"; check("utf8Hello", hello); check("utf8Horse", horse); check("utf8Math", math); check("utf16Hello", hello); check("utf16Horse", horse); check("utf16Math", math); check("macHello", hello); check("macHorse", horse); check("asciiHello", hello); check("isoHello", hello); check("isoHorse", horse); check("cpHello", hello); check("cpHorse", horse); } public void test_conditional_fail() { doCompile("test_conditional_fail"); check("result", 3); } public void test_expression_statement(){ // test case for issue 4174 doCompileExpectErrors("test_expression_statement", Arrays.asList("Syntax error, statement expected","Syntax error, statement expected")); } public void test_dictionary_read() { doCompile("test_dictionary_read"); check("s", "Verdon"); check("i", Integer.valueOf(211)); check("l", Long.valueOf(226)); check("d", BigDecimal.valueOf(239483061)); check("n", Double.valueOf(934.2)); check("a", new GregorianCalendar(1992, GregorianCalendar.AUGUST, 1).getTime()); check("b", true); byte[] y = (byte[]) getVariable("y"); assertEquals(10, y.length); assertEquals(89, y[9]); check("sNull", null); check("iNull", null); check("lNull", null); check("dNull", null); check("nNull", null); check("aNull", null); check("bNull", null); check("yNull", null); check("stringList", Arrays.asList("aa", "bb", null, "cc")); check("dateList", Arrays.asList(new Date(12000), new Date(34000), 
null, new Date(56000))); @SuppressWarnings("unchecked") List<byte[]> byteList = (List<byte[]>) getVariable("byteList"); assertDeepEquals(byteList, Arrays.asList(new byte[] {0x12}, new byte[] {0x34, 0x56}, null, new byte[] {0x78})); } public void test_dictionary_write() { doCompile("test_dictionary_write"); assertEquals(832, graph.getDictionary().getValue("i") ); assertEquals("Guil", graph.getDictionary().getValue("s")); assertEquals(Long.valueOf(540), graph.getDictionary().getValue("l")); assertEquals(BigDecimal.valueOf(621), graph.getDictionary().getValue("d")); assertEquals(934.2, graph.getDictionary().getValue("n")); assertEquals(new GregorianCalendar(1992, GregorianCalendar.DECEMBER, 2).getTime(), graph.getDictionary().getValue("a")); assertEquals(true, graph.getDictionary().getValue("b")); byte[] y = (byte[]) graph.getDictionary().getValue("y"); assertEquals(2, y.length); assertEquals(18, y[0]); assertEquals(-94, y[1]); assertEquals(Arrays.asList("xx", null), graph.getDictionary().getValue("stringList")); assertEquals(Arrays.asList(new Date(98000), null, new Date(76000)), graph.getDictionary().getValue("dateList")); @SuppressWarnings("unchecked") List<byte[]> byteList = (List<byte[]>) graph.getDictionary().getValue("byteList"); assertDeepEquals(byteList, Arrays.asList(null, new byte[] {(byte) 0xAB, (byte) 0xCD}, new byte[] {(byte) 0xEF})); check("assignmentReturnValue", "Guil"); } public void test_dictionary_write_null() { doCompile("test_dictionary_write_null"); assertEquals(null, graph.getDictionary().getValue("s")); assertEquals(null, graph.getDictionary().getValue("sVerdon")); assertEquals(null, graph.getDictionary().getValue("i") ); assertEquals(null, graph.getDictionary().getValue("i211") ); assertEquals(null, graph.getDictionary().getValue("l")); assertEquals(null, graph.getDictionary().getValue("l452")); assertEquals(null, graph.getDictionary().getValue("d")); assertEquals(null, graph.getDictionary().getValue("d621")); assertEquals(null, 
graph.getDictionary().getValue("n")); assertEquals(null, graph.getDictionary().getValue("n9342")); assertEquals(null, graph.getDictionary().getValue("a")); assertEquals(null, graph.getDictionary().getValue("a1992")); assertEquals(null, graph.getDictionary().getValue("b")); assertEquals(null, graph.getDictionary().getValue("bTrue")); assertEquals(null, graph.getDictionary().getValue("y")); assertEquals(null, graph.getDictionary().getValue("yFib")); } public void test_dictionary_invalid_key(){ doCompileExpectErrors("test_dictionary_invalid_key", Arrays.asList("Dictionary entry 'invalid' does not exist")); } public void test_dictionary_string_to_int(){ doCompileExpectErrors("test_dictionary_string_to_int", Arrays.asList("Type mismatch: cannot convert from 'string' to 'integer'","Type mismatch: cannot convert from 'string' to 'integer'")); } public void test_utillib_sleep() { long time = System.currentTimeMillis(); doCompile("test_utillib_sleep"); long tmp = System.currentTimeMillis() - time; assertTrue("sleep() function didn't pause execution "+ tmp, tmp >= 1000); } public void test_utillib_random_uuid() { doCompile("test_utillib_random_uuid"); assertNotNull(getVariable("uuid")); } public void test_stringlib_randomString(){ doCompile("string test; function integer transform(){test = randomString(1,3); return 0;}","test_stringlib_randomString"); assertNotNull(getVariable("test")); } public void test_stringlib_validUrl() { doCompile("test_stringlib_url"); check("urlValid", Arrays.asList(true, true, false, true, false, true)); check("protocol", Arrays.asList("http", "https", null, "sandbox", null, "zip")); check("userInfo", Arrays.asList("", "chuck:norris", null, "", null, "")); check("host", Arrays.asList("example.com", "server.javlin.eu", null, "cloveretl.test.scenarios", null, "")); check("port", Arrays.asList(-1, 12345, -2, -1, -2, -1)); check("path", Arrays.asList("", "/backdoor/trojan.cgi", null, "/graph/UDR_FileURL_SFTP_OneGzipFileSpecified.grf", null, 
"(sftp://test:test@koule/home/test/data-in/file2.zip)")); check("query", Arrays.asList("", "hash=SHA560;god=yes", null, "", null, "")); check("ref", Arrays.asList("", "autodestruct", null, "", null, "innerfolder2/URLIn21.txt")); } public void test_stringlib_escapeUrl() { doCompile("test_stringlib_escapeUrl"); check("escaped", "http://example.com/foo%20bar%5E"); check("unescaped", "http://example.com/foo bar^"); } public void test_stringlib_escapeUrl_unescapeUrl_expect_error(){ //test: escape - empty string try { doCompile("string test; function integer transform() {test = escapeUrl(''); return 0;}","test_stringlib_escapeUrl_expect_error"); fail(); } catch (Exception e) { // do nothing } //test: escape - null string try { doCompile("string test; function integer transform() {test = escapeUrl(null); return 0;}","test_stringlib_escapeUrl_expect_error"); fail(); } catch (Exception e) { // do nothing } //test: unescape - empty string try { doCompile("string test; function integer transform() {test = unescapeUrl(''); return 0;}","test_stringlib_escapeUrl_expect_error"); fail(); } catch (Exception e) { // do nothing } //test: unescape - null try { doCompile("string test; function integer transform() {test = unescapeUrl(null); return 0;}","test_stringlib_escapeUrl_expect_error"); fail(); } catch (Exception e) { // do nothing } //test: escape - invalid URL try { doCompile("string test; function integer transform() {test = escapeUrl('somewhere over the rainbow'); return 0;}","test_stringlib_escapeUrl_expect_error"); fail(); } catch (Exception e) { // do nothing } //test: unescpae - invalid URL try { doCompile("string test; function integer transform() {test = unescapeUrl('mister%20postman'); return 0;}","test_stringlib_escapeUrl_expect_error"); fail(); } catch (Exception e) { // do nothing } } public void test_stringlib_resolveParams() { doCompile("test_stringlib_resolveParams"); check("resultNoParams", "Special character representing new line is: \\n calling CTL function 
MESSAGE; $DATAIN_DIR=./data-in"); check("resultFalseFalseParams", "Special character representing new line is: \\n calling CTL function `uppercase(\"message\")`; $DATAIN_DIR=./data-in"); check("resultTrueFalseParams", "Special character representing new line is: \n calling CTL function `uppercase(\"message\")`; $DATAIN_DIR=./data-in"); check("resultFalseTrueParams", "Special character representing new line is: \\n calling CTL function MESSAGE; $DATAIN_DIR=./data-in"); check("resultTrueTrueParams", "Special character representing new line is: \n calling CTL function MESSAGE; $DATAIN_DIR=./data-in"); } public void test_utillib_getEnvironmentVariables() { doCompile("test_utillib_getEnvironmentVariables"); check("empty", false); } public void test_utillib_getJavaProperties() { String key1 = "my.testing.property"; String key2 = "my.testing.property2"; String value = "my value"; String value2; assertNull(System.getProperty(key1)); assertNull(System.getProperty(key2)); System.setProperty(key1, value); try { doCompile("test_utillib_getJavaProperties"); value2 = System.getProperty(key2); } finally { System.clearProperty(key1); assertNull(System.getProperty(key1)); System.clearProperty(key2); assertNull(System.getProperty(key2)); } check("java_specification_name", "Java Platform API Specification"); check("my_testing_property", value); assertEquals("my value 2", value2); } public void test_utillib_getParamValues() { doCompile("test_utillib_getParamValues"); Map<String, String> params = new HashMap<String, String>(); params.put("PROJECT", "."); params.put("DATAIN_DIR", "./data-in"); params.put("COUNT", "3"); params.put("NEWLINE", "\\n"); // special characters should NOT be resolved check("params", params); } public void test_utillib_getParamValue() { doCompile("test_utillib_getParamValue"); Map<String, String> params = new HashMap<String, String>(); params.put("PROJECT", "."); params.put("DATAIN_DIR", "./data-in"); params.put("COUNT", "3"); params.put("NEWLINE", "\\n"); // 
special characters should NOT be resolved params.put("NONEXISTING", null); check("params", params); } public void test_stringlib_getUrlParts() { doCompile("test_stringlib_getUrlParts"); List<Boolean> isUrl = Arrays.asList(true, true, true, true, false); List<String> path = Arrays.asList( "/users/a6/15e83578ad5cba95c442273ea20bfa/msf-183/out5.txt", "/data-in/fileOperation/input.txt", "/data/file.txt", "/data/file.txt", null); List<String> protocol = Arrays.asList("sftp", "sandbox", "ftp", "https", null); List<String> host = Arrays.asList( "ava-fileManipulator1-devel.getgooddata.com", "cloveretl.test.scenarios", "ftp.test.com", "www.test.com", null); List<Integer> port = Arrays.asList(-1, -1, 21, 80, -2); List<String> userInfo = Arrays.asList( "user%40gooddata.com:password", "", "test:test", "test:test", null); List<String> ref = Arrays.asList("", "", "", "", null); List<String> query = Arrays.asList("", "", "", "", null); check("isUrl", isUrl); check("path", path); check("protocol", protocol); check("host", host); check("port", port); check("userInfo", userInfo); check("ref", ref); check("query", query); check("isURL_empty", false); check("path_empty", null); check("protocol_empty", null); check("host_empty", null); check("port_empty", -2); check("userInfo_empty", null); check("ref_empty", null); check("query_empty", null); check("isURL_null", false); check("path_null", null); check("protocol_null", null); check("host_null", null); check("port_null", -2); check("userInfo_null", null); check("ref_null", null); check("query_empty", null); } public void test_randomlib_randomDate() { doCompile("test_randomlib_randomDate"); final long HOUR = 60L * 60L * 1000L; Date BORN_VALUE_NO_MILLIS = new Date(BORN_VALUE.getTime() / 1000L * 1000L); check("noTimeZone1", BORN_VALUE); check("noTimeZone2", BORN_VALUE_NO_MILLIS); check("withTimeZone1", new Date(BORN_VALUE_NO_MILLIS.getTime() + 2*HOUR)); // timezone changes from GMT+5 to GMT+3 check("withTimeZone2", new 
Date(BORN_VALUE_NO_MILLIS.getTime() - 2*HOUR)); // timezone changes from GMT+3 to GMT+5 } }
package ttaomae.trees;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.util.Arrays;
import java.util.Collection;
import java.util.Random;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

import ttaomae.lists.List;

/**
 * Parameterized test suite run against every {@link BinarySearchTree}
 * implementation listed by {@link #data()}. Each test starts from a fresh,
 * empty tree created in {@link #init()}.
 */
@RunWith(Parameterized.class)
public class BinarySearchTreeTest {
    /** Number of elements inserted/removed per test. */
    private static final int ITERATIONS = 10_000;
    /** Fixed seed so the "random" tests are deterministic and reproducible. */
    private static final long RANDOM_SEED = 0L;

    /** The tree implementations under test. */
    @Parameters
    public static Collection<Object[]> data() {
        return Arrays.asList(new Object[][] {
            { NonBalancingBinarySearchTree.class },
            { RedBlackTree.class }
        });
    }

    private Class<BinarySearchTree<Integer>> treeClass;
    private BinarySearchTree<Integer> tree;

    public BinarySearchTreeTest(Class<BinarySearchTree<Integer>> treeClass) {
        this.treeClass = treeClass;
    }

    /** Creates a fresh, empty tree before each test. */
    @Before
    public void init() throws InstantiationException, IllegalAccessException {
        tree = treeClass.newInstance();
    }

    @Test
    public void testAdd_null() {
        try {
            tree.add(null);
            fail();
        } catch (IllegalArgumentException e) {}
    }

    @Test
    public void testAdd_singleItem() {
        assertEquals(0, tree.size());
        assertTrue(tree.add(5));
        // 'new Integer' is deliberate throughout this class: a distinct boxed
        // instance ensures the tree compares by equals/compareTo, not by
        // reference identity.
        assertTrue(tree.contains(new Integer(5)));
        assertFalse(tree.add(5));
    }

    @Test
    public void testAdd_ascendingOrder() {
        for (int i = 0; i < ITERATIONS; i++) {
            assertTrue(tree.add(i));
            assertEquals(i + 1, tree.size());
        }
        // duplicates must be rejected and must not change the size
        for (int i = 0; i < ITERATIONS; i++) {
            assertTrue(tree.contains(new Integer(i)));
            assertFalse(tree.add(new Integer(i)));
            assertEquals(ITERATIONS, tree.size());
        }
    }

    @Test
    public void testAdd_descendingOrder() {
        for (int i = 0; i < ITERATIONS; i++) {
            assertTrue(tree.add(ITERATIONS - i));
            assertEquals(i + 1, tree.size());
        }
        for (int i = 0; i < ITERATIONS; i++) {
            assertTrue(tree.contains(new Integer(ITERATIONS - i)));
            assertFalse(tree.add(new Integer(ITERATIONS - i)));
            assertEquals(ITERATIONS, tree.size());
        }
    }

    @Test
    public void testAdd_randomOrder() {
        Integer[] addOrder = getRandomArray(ITERATIONS, RANDOM_SEED);

        for (int i = 0; i < ITERATIONS; i++) {
            assertTrue(tree.add(addOrder[i]));
            assertEquals(i + 1, tree.size());
        }
        for (int i = 0; i < ITERATIONS; i++) {
            assertTrue(tree.contains(new Integer(addOrder[i])));
            assertFalse(tree.add(addOrder[i]));
            assertEquals(ITERATIONS, tree.size());
        }
    }

    @Test
    public void testRemove_randomAdd_randomRemove() {
        Random rng = new Random(RANDOM_SEED);
        Integer[] addOrder = getRandomArray(ITERATIONS, RANDOM_SEED);
        for (int n = 0; n < ITERATIONS; n++) {
            // perform "inside-out" Fisher-Yates shuffle
            // pick random location
            int i = rng.nextInt(n + 1);
            // if necessary, move whatever is at that location to the end of the
            // sublist
            if (i != n) {
                addOrder[n] = addOrder[i];
            }
            // place next number at random location
            addOrder[i] = n;
        }

        for (int i = 0; i < ITERATIONS; i++) {
            tree.add(addOrder[i]);
        }

        Integer[] removeOrder = getRandomArray(ITERATIONS, RANDOM_SEED + 1);
        // check that remove method actually removes items
        for (int i = 0; i < ITERATIONS; i++) {
            assertTrue(tree.remove(new Integer(removeOrder[i])));
            assertEquals(ITERATIONS - 1 - i, tree.size());
            assertFalse(tree.contains(new Integer(removeOrder[i])));
        }
        // check that remove returns false if the number doesn't exist
        for (int i = 0; i < ITERATIONS; i++) {
            assertFalse(tree.remove(new Integer(i)));
            assertEquals(0, tree.size());
        }
    }

    @Test
    public void testRemove_ascendingAdd_ascendingRemove() {
        for (int i = 0; i < ITERATIONS; i++) {
            tree.add(i);
        }
        for (int i = 0; i < ITERATIONS; i++) {
            assertTrue(tree.remove(new Integer(i)));
            assertEquals(ITERATIONS - 1 - i, tree.size());
            assertFalse(tree.contains(new Integer(i)));
        }
    }

    @Test
    public void testRemove_ascendingAdd_descendingRemove() {
        for (int i = 0; i < ITERATIONS; i++) {
            tree.add(i);
        }
        for (int i = 0; i < ITERATIONS; i++) {
            assertTrue(tree.remove(new Integer(ITERATIONS - 1 - i)));
            assertEquals(ITERATIONS - 1 - i, tree.size());
            // BUG FIX: previously asserted !contains(ITERATIONS - i), a value
            // that was never added (range is 0..ITERATIONS-1), so the check
            // was vacuously true; verify the element just removed is gone.
            assertFalse(tree.contains(new Integer(ITERATIONS - 1 - i)));
        }
    }

    @Test
    public void testRemove_decendingAdd_descendingRemove() {
        for (int i = 0; i < ITERATIONS; i++) {
            tree.add(ITERATIONS - i);
        }
        for (int i = 0; i < ITERATIONS; i++) {
            assertTrue(tree.remove(new Integer(ITERATIONS - i)));
            assertEquals(ITERATIONS - 1 - i, tree.size());
            assertFalse(tree.contains(new Integer(ITERATIONS - i)));
        }
    }

    @Test
    public void testRemove_decendingAdd_ascendingRemove() {
        for (int i = 0; i < ITERATIONS; i++) {
            tree.add(ITERATIONS - i);
        }
        for (int i = 0; i < ITERATIONS; i++) {
            assertTrue(tree.remove(new Integer(i + 1)));
            assertEquals(ITERATIONS - 1 - i, tree.size());
            // BUG FIX: previously asserted !contains(i); for i == 0 the value
            // was never added (range is 1..ITERATIONS) and otherwise it lagged
            // one iteration behind; verify the element just removed is gone.
            assertFalse(tree.contains(new Integer(i + 1)));
        }
    }

    @Test
    public void testInOrderTraversal() {
        Integer[] addOrder = getRandomArray(ITERATIONS, RANDOM_SEED);
        for (int i = 0; i < ITERATIONS; i++) {
            tree.add(addOrder[i]);
        }
        // an in-order traversal of a BST must be strictly ascending
        List<Integer> sortedList = tree.inOrderTraversal();
        for (int i = 0; i < sortedList.size() - 1; i++) {
            assertTrue(sortedList.get(i).compareTo(sortedList.get(i + 1)) < 0);
        }
    }

    /**
     * Returns an array containing the numbers 0 (inclusive) up to n (exclusive)
     * in a random order, using the specified random seed.
     */
    private Integer[] getRandomArray(int n, long seed) {
        assert (n > 0);
        Integer[] result = new Integer[n];

        Random rng = new Random(seed);
        for (int i = 0; i < n; i++) {
            // perform "inside-out" Fisher-Yates shuffle
            // pick random location
            int j = rng.nextInt(i + 1);
            // if necessary, move whatever is at that location to the end of the
            // sublist
            if (j != i) {
                result[i] = result[j];
            }
            // place next number at random location
            result[j] = i;
        }
        return result;
    }
}
package uk.org.ponder.rsf.components;

/**
 * Backs a selection control of some kind, where named values are presented in a
 * list to the user. The returned value which is submitted may be a single
 * selection, multiple selection, or empty, depending on the component type in
 * the <code>selection</code> field.
 * <p>
 * The value binding <code>valuebinding</code> in the superclass, if
 * non-empty, will retrieve an object list, which will be supplied during fixup
 * to the resolving beans referenced by <code>nameresolver</code> (which must
 * not be empty in this case) and <code>idresolver</code>. If idresolver is
 * empty, the list is assumed to be a list of IDs already.
 */
public class UISelect extends UIBoundList {
  /** Suffix appended to the component ID to form the ID of {@link #names}. */
  public static final String NAMES_ID_SUFFIX = "-names";
  /** Suffix appended to the component ID to form the ID of {@link #selection}. */
  public static final String SELECTION_ID_SUFFIX = "-selection";

  /** A component representing the rendered labels for the list control */
  public UIBoundList names;
  /**
   * The input component representing the actual selected value. Expected to be
   * either UIInput or UIInputMany
   */
  public UIBound selection;

  /**
   * Creates a non-submitting (output-only) selection control and adds it to
   * the supplied container.
   *
   * @param tofill container the new component is added to
   * @param ID the ID of the select component itself
   * @param values the option values backing the list
   * @param labels the labels rendered for the options
   * @param value the currently selected value
   * @return the newly created, registered component
   */
  public static UISelect make(UIContainer tofill, String ID, String[] values,
      String[] labels, String value) {
    UISelect togo = new UISelect();
    togo.ID = ID;
    togo.setValue(values);
    togo.names = new UIBoundList();
    // BUG FIX: previously assigned "togo.ID = ID + NAMES_ID_SUFFIX", which
    // clobbered the select's own ID instead of identifying the labels child.
    togo.names.ID = ID + NAMES_ID_SUFFIX;
    togo.names.setValue(labels);
    togo.selection = new UIOutput();
    togo.selection.ID = ID + SELECTION_ID_SUFFIX;
    ((UIOutput) togo.selection).setValue(value);
    tofill.addComponent(togo);
    return togo;
  }
}
package org.jenetics.util;

/**
 * Static helper methods for common mathematical operations: overflow-checked
 * long arithmetic, double-array aggregation, integer utilities, and (in the
 * nested classes) probability widening and PRNG seed generation.
 * Not instantiable.
 */
public final class math {

	// Utility class; the project-local helper presumably throws to prevent
	// instantiation — TODO confirm object.noInstanceOf semantics.
	private math() {
		object.noInstanceOf(math.class);
	}

	/**
	 * Adds two long values and throws an ArithmeticException in the case of an
	 * overflow.
	 *
	 * @param a the first summand.
	 * @param b the second summand.
	 * @return the sum of the given values.
	 * @throws ArithmeticException if the summation would lead to an overflow.
	 */
	public static long plus(final long a, final long b) {
		// MIN_VALUE + MIN_VALUE wraps to exactly 0, which the sign checks
		// below would not detect; handle it explicitly first.
		if (a == Long.MIN_VALUE && b == Long.MIN_VALUE) {
			throw new ArithmeticException(String.format("Overflow: %d + %d", a, b));
		}

		final long z = a + b;
		// Overflow iff two positive operands yield a negative sum, or two
		// negative operands yield a positive sum.
		if (a > 0) {
			if (b > 0 && z < 0) {
				throw new ArithmeticException(String.format("Overflow: %d + %d", a, b));
			}
		} else if (b < 0 && z > 0) {
			throw new ArithmeticException(String.format("Overflow: %d + %d", a, b));
		}

		return z;
	}

	/**
	 * Subtracts two long values and throws an ArithmeticException in the case
	 * of an overflow.
	 *
	 * @param a the minuend.
	 * @param b the subtrahend.
	 * @return the difference of the given values.
	 * @throws ArithmeticException if the subtraction would lead to an overflow.
	 */
	public static long minus(final long a, final long b) {
		final long z = a - b;
		// Overflow iff (positive - negative) goes negative, or
		// (non-positive - positive) goes positive.
		if (a > 0) {
			if (b < 0 && z < 0) {
				throw new ArithmeticException(String.format("Overflow: %d - %d", a, b));
			}
		} else if (b > 0 && z > 0) {
			throw new ArithmeticException(String.format("Overflow: %d - %d", a, b));
		}

		return z;
	}

	/**
	 * Sums the given double array using Kahan compensated summation
	 * (the running correction {@code c} recovers low-order bits lost to
	 * floating-point rounding). Iterates from the end of the array.
	 *
	 * @param values the values to add.
	 * @return the compensated sum of the values.
	 * @throws NullPointerException if the given array is {@code null}.
	 */
	public static double sum(final double[] values) {
		double sum = 0.0;
		double c = 0.0;  // running compensation for lost low-order bits
		double y = 0.0;
		double t = 0.0;
		for (int i = values.length; --i >= 0;) {
			y = values[i] - c;
			t = sum + y;
			c = t - sum - y;  // (t - sum) cancels the high part; remainder is the error
			sum = t;
		}
		return sum;
	}

	/**
	 * Add the values of the given array. Note: unlike {@link #plus(long, long)},
	 * this accumulation is NOT overflow-checked.
	 *
	 * @param values the values to add.
	 * @return the values sum.
	 * @throws NullPointerException if the values are null.
	 */
	public static long sum(final long[] values) {
		long sum = 0;
		for (int i = values.length; --i >= 0;) {
			sum += values[i];
		}
		return sum;
	}

	/**
	 * Normalize the given double array, so that it sums to one. The
	 * normalization is performed in place and the same {@code values} are
	 * returned.
	 *
	 * @param values the values to normalize.
	 * @return the {@code values} array.
	 * @throws NullPointerException if the given double array is {@code null}.
	 */
	public static double[] normalize(final double[] values) {
		// Multiply by the reciprocal of the (compensated) sum.
		final double sum = 1.0/sum(values);
		for (int i = values.length; --i >= 0;) {
			values[i] = values[i]*sum;
		}
		return values;
	}

	/**
	 * Return the minimum value of the given double array.
	 *
	 * @param values the double array.
	 * @return the minimum value or {@link Double#NaN} if the given array is empty.
	 * @throws NullPointerException if the given array is {@code null}.
	 */
	public static double min(final double[] values) {
		double min = Double.NaN;
		if (values.length > 0) {
			min = values[0];

			for (int i = values.length; --i >= 1;) {
				if (values[i] < min) {
					min = values[i];
				}
			}
		}

		return min;
	}

	/**
	 * Return the maximum value of the given double array.
	 *
	 * @param values the double array.
	 * @return the maximum value or {@link Double#NaN} if the given array is empty.
	 * @throws NullPointerException if the given array is {@code null}.
	 */
	public static double max(final double[] values) {
		double max = Double.NaN;
		if (values.length > 0) {
			max = values[0];

			for (int i = values.length; --i >= 1;) {
				if (values[i] > max) {
					max = values[i];
				}
			}
		}

		return max;
	}

	/**
	 * <i>Clamping</i> a value between a pair of boundary values.
	 * <i>Note: using clamp with floating point numbers may give unexpected
	 * results if one of the values is {@code NaN}.</i>
	 *
	 * @param v the value to <i>clamp</i>
	 * @param lo the lower bound.
	 * @param hi the upper bound.
	 * @return The clamped value:
	 *         <ul>
	 *         <li>{@code lo if v < lo}</li>
	 *         <li>{@code hi if hi < v}</li>
	 *         <li>{@code otherwise, v}</li>
	 *         </ul>
	 */
	public static double clamp(final double v, final double lo, final double hi) {
		return v < lo ? lo : (v > hi ? hi : v);
	}

	/**
	 * Component wise multiplication of the given double array, in place.
	 *
	 * @param values the double values to multiply.
	 * @param multiplier the multiplier.
	 * @throws NullPointerException if the given double array is {@code null}.
	 */
	public static void times(final double[] values, final double multiplier) {
		for (int i = values.length; --i >= 0;) {
			values[i] *= multiplier;
		}
	}

	/**
	 * Component wise division of the given double array, in place.
	 *
	 * @param values the double values to divide.
	 * @param divisor the divisor.
	 * @throws NullPointerException if the given double array is {@code null}.
	 */
	public static void divide(final double[] values, final double divisor) {
		for (int i = values.length; --i >= 0;) {
			values[i] /= divisor;
		}
	}

	/**
	 * Binary exponentiation algorithm (exponentiation by squaring).
	 * Note: the multiplications are not overflow-checked.
	 *
	 * @param b the base number.
	 * @param e the exponent.
	 * @return {@code b^e}.
	 * @throws IllegalArgumentException if the exponent is negative.
	 */
	public static long pow(final long b, final long e) {
		if (e < 0) {
			throw new IllegalArgumentException(String.format(
				"Exponent is negative: %d", e
			));
		}

		long base = b;
		long exp = e;
		long result = 1;

		while (exp != 0) {
			// Multiply the result by the current square whenever the
			// corresponding exponent bit is set.
			if ((exp & 1) != 0) {
				result *= base;
			}
			exp >>>= 1;
			base *= base;
		}

		return result;
	}

	/**
	 * Greatest common divisor via Euclid's algorithm.
	 *
	 * @throws ArithmeticException if {@code b} is zero (division by zero in
	 *         the initial {@code x % y}).
	 */
	static int gcd(final int a, final int b) {
		int x = a;
		int y = b;

		int mod = x%y;
		while (mod != 0) {
			x = y;
			y = mod;
			mod = x%y;
		}

		return y;
	}

	/**
	 * Returns {@code true} if {@code a * b} fits into an {@code int} without
	 * overflow (the product is computed in 64-bit and range-checked).
	 */
	static boolean isMultiplicationSave(final int a, final int b) {
		final long m = (long)a*(long)b;
		return m >= Integer.MIN_VALUE && m <= Integer.MAX_VALUE;
	}

	/**
	 * Distance between two doubles measured in ULP positions; may throw via
	 * the overflow-checked {@link #minus(long, long)}.
	 */
	public static long ulpDistance(final double a, final double b) {
		return minus(ulpPosition(a), ulpPosition(b));
	}

	/**
	 * Maps a double onto a long such that the ordering of the longs matches
	 * the ordering of the doubles: the raw IEEE-754 bits are used directly for
	 * non-negative values and mirrored for negative ones.
	 */
	public static long ulpPosition(final double a) {
		long t = Double.doubleToLongBits(a);
		if (t < 0) {
			t = Long.MIN_VALUE - t;
		}
		return t;
	}

	/** Helpers for mapping probabilities onto the int range. Not instantiable. */
	static final class probability {
		private probability() {
			object.noInstanceOf(probability.class);
		}

		// 2^32 - 1: the size of the full int value range.
		static final long INT_RANGE = pow(2, 32) - 1;

		/**
		 * Maps the probability, given in the range {@code [0, 1]}, to an
		 * integer in the range {@code [Integer.MIN_VALUE, Integer.MAX_VALUE]}.
		 *
		 * @param probability the probability to widen.
		 * @return the widened probability.
		 */
		static int toInt(final double probability) {
			return (int)(Math.round(INT_RANGE*probability) + Integer.MIN_VALUE);
		}
	}

	/** PRNG seed-generation helpers. Not instantiable. */
	public static final class random {
		private random() {
			object.noInstanceOf(random.class);
		}

		/**
		 * Create a new <em>seed</em> byte array of the given length.
		 *
		 * @see #seed(byte[])
		 * @see #seed()
		 *
		 * @param length the length of the returned byte array.
		 * @return a new <em>seed</em> byte array of the given length
		 * @throws NegativeArraySizeException if the given length is smaller
		 *         than zero.
		 */
		public static byte[] seedBytes(final int length) {
			return seed(new byte[length]);
		}

		/**
		 * Fills the given array with seed bytes, drawing one {@code long}
		 * from {@link #seed()} per (up to) eight bytes and emitting it
		 * little-end first.
		 *
		 * @param seed the array to fill; also returned.
		 */
		public static byte[] seed(final byte[] seed) {
			for (int i = 0, len = seed.length; i < len;) {
				int n = Math.min(len - i, Long.SIZE/Byte.SIZE);

				for (long x = seed(); n-- > 0; x >>= Byte.SIZE) {
					seed[i++] = (byte)x;
				}
			}

			return seed;
		}

		/** Returns a seed value derived from the current nano time. */
		public static long seed() {
			return seed(nanoTimeSeed());
		}

		/**
		 * Uses the given {@code base} value to create a reasonable safe seed value
		 * by combining it with values of {@code new Object().hashCode()}:
		 * <p/>
		 * [code]
		 * public static long seed(final long base) {
		 *     final long hashSeed = ((long)(new Object().hashCode()) << 32) |
		 *                           new Object().hashCode();
		 *     long seed = base ^ hashSeed;
		 *     seed ^= seed << 17;
		 *     seed ^= seed >>> 31;
		 *     seed ^= seed << 8;
		 *     return seed;
		 * }
		 * [/code]
		 *
		 * @param base the base value of the seed to create
		 * @return the created seed value.
		 */
		public static long seed(final long base) {
			// XOR with object-identity entropy, then xorshift-style mixing.
			long seed = base ^ objectHashSeed();
			seed ^= seed << 17;
			seed ^= seed >>> 31;
			seed ^= seed << 8;
			return seed;
		}

		// Packs the identity hash codes of two fresh objects into one long.
		private static long objectHashSeed() {
			return ((long)(new Object().hashCode()) << 32) | new Object().hashCode();
		}

		// Builds a 64-bit value from eight separate System.nanoTime() samples,
		// keeping only the (fastest-changing) low byte of each sample and
		// scattering the bytes across the result.
		private static long nanoTimeSeed() {
			return ((System.nanoTime() & 255) << 56) |
				((System.nanoTime() & 255) << 24) |
				((System.nanoTime() & 255) << 48) |
				((System.nanoTime() & 255) << 16) |
				((System.nanoTime() & 255) << 40) |
				((System.nanoTime() & 255) << 8) |
				((System.nanoTime() & 255) << 32) |
				((System.nanoTime() & 255) << 0);
		}
	}
}
import java.util.ArrayList;
import java.util.List;

/**
 * Minimal snake-on-a-grid model: a rectangular board of {@link BLOCK} cells
 * and a snake that (currently) consists of a single head segment which can be
 * moved one cell at a time. Positions are {row, col} int pairs.
 */
public class SnakeGame {

    /** Board dimensions as {rows, cols}; fixed for the lifetime of the game. */
    private final int[] boardSize;
    /** Snake segments, head first; only the head is tracked so far. */
    List<int[]> snake;
    /** Cell occupancy grid, indexed [row][col]. */
    BLOCK[][] board;

    /**
     * Creates an empty board of the given size.
     *
     * @param boardSize {rows, cols} of the board
     */
    public SnakeGame(int[] boardSize) {
        this.boardSize = boardSize;
        snake = new ArrayList<>();
        board = new BLOCK[boardSize[0]][boardSize[1]];
        for (int row = 0; row < boardSize[0]; row++) {
            for (int col = 0; col < boardSize[1]; col++) {
                board[row][col] = BLOCK.IS_EMPTY;
            }
        }
    }

    /**
     * Returns an all-empty rendering of the board (one '.' per cell, one line
     * per row). Note this deliberately ignores the board contents — use
     * {@link #toString()} for an occupancy rendering.
     */
    public String getBoardString() {
        StringBuilder sb = new StringBuilder();
        for (int col = 0; col < boardSize[1]; col++) {
            sb.append(".");
        }
        sb.append(System.lineSeparator());
        String line = sb.toString();
        sb = new StringBuilder();
        for (int row = 0; row < boardSize[0]; row++) {
            sb.append(line);
        }
        return sb.toString();
    }

    /** Returns the block at position {row, col}. */
    public BLOCK at(int[] pos) {
        return board[pos[0]][pos[1]];
    }

    /** Places the snake's head at {row, col} and marks the cell occupied. */
    public void startSnakeAt(int[] pos) {
        snake.add(pos);
        board[pos[0]][pos[1]] = BLOCK.IS_SNAKE;
    }

    /**
     * Moves the head one cell in the given direction; a move that would leave
     * the board is silently ignored.
     * <p>
     * NOTE(review): UP increases the row index and LEFT increases the column
     * index. Rows are rendered top-down from the highest index in
     * {@link #toString()}, so UP appears as "up" on screen, but LEFT/RIGHT
     * look swapped relative to their names — confirm intent with the tests.
     */
    public void moveSnake(MOVE direction) {
        int[] pos = snake.get(0);
        int[] newPos = pos;
        switch (direction) {
            case UP:
                newPos = new int[] {pos[0] + 1, pos[1]};
                if (newPos[0] >= boardSize[0]) {
                    return;
                }
                break;
            case DOWN:
                newPos = new int[] {pos[0] - 1, pos[1]};
                if (newPos[0] < 0) {
                    return;
                }
                break;
            case LEFT:
                newPos = new int[] {pos[0], pos[1] + 1};
                if (newPos[1] >= boardSize[1]) {
                    return;
                }
                break;
            case RIGHT:
                newPos = new int[] {pos[0], pos[1] - 1};
                if (newPos[1] < 0) {
                    return;
                }
                break;
        }
        snake.set(0, newPos);
        board[pos[0]][pos[1]] = BLOCK.IS_EMPTY;
        board[newPos[0]][newPos[1]] = BLOCK.IS_SNAKE;
    }

    /**
     * Renders the board row by row, highest row index first: 'S' for snake
     * cells, '.' for empty ones, each row terminated by a line separator.
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        // BUG FIX: the original loop header read "row >= 0; row" — the "--) {"
        // was missing, so the class did not compile.
        for (int row = boardSize[0] - 1; row >= 0; row--) {
            for (int col = 0; col < boardSize[1]; col++) {
                switch (board[row][col]) {
                    case IS_SNAKE:
                        sb.append('S');
                        break;
                    default: // IS_EMPTY
                        sb.append('.');
                }
            }
            sb.append(System.lineSeparator());
        }
        return sb.toString();
    }
}

/** Cell states; walls are not modelled yet (IS_WALL to follow). */
enum BLOCK {
    IS_EMPTY, IS_SNAKE
}

/** The four movement directions the snake understands. */
enum MOVE {
    UP, DOWN, LEFT, RIGHT;
}
package big.marketing;

import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Properties;

import org.apache.log4j.Logger;

/**
 * Static access to the application's configuration. Settings are loaded from
 * a default (template) file first and then overlaid with a user file, so the
 * user file only needs to contain the values that differ from the defaults.
 */
public class Settings {
	static Logger logger = Logger.getLogger(Settings.class);

	private static Properties properties = new Properties();

	private static final String configFile = "config/eyeNet.properties";
	private static final String defaultConfigFile = configFile + ".template";
	private static final String logConfigFile = "config/log4j.properties";
	private static final String defaultLogConfigFile = logConfigFile + ".template";

	/** Returns the value for the given key, or null if the key is unknown. */
	public static String get(String key) {
		return properties.getProperty(key);
	}

	/**
	 * Returns the value for the given key parsed as an int.
	 *
	 * @throws NumberFormatException if the key is missing or non-numeric
	 */
	public static int getInt(String key) {
		return Integer.parseInt(get(key));
	}

	/**
	 * Loads the default config, then overlays the user config on top of it.
	 * Exits the JVM if neither file can be read; writes the defaults out as
	 * the user config if only the user file is missing.
	 */
	public static void loadConfig() {
		boolean defaultConfigOK = loadConfigFile(defaultConfigFile);
		// this enables hierarchical properties:
		// eyeNet.properties doesn't have to contain all settings, only the ones different from default
		// settings not existing in eyeNet.properties are getting the default value
		Properties defaults = properties;
		properties = new Properties(defaults);
		boolean userConfigOK = loadConfigFile(configFile);
		if (!userConfigOK) {
			if (!defaultConfigOK) {
				logger.fatal("Could not load any config, exiting...");
				System.exit(1);
			} else {
				// no user config file existing, saving default config as user config
				// BUG FIX: the FileWriter was previously never closed, which could
				// leak the handle and leave the file unflushed on some platforms.
				try (FileWriter writer = new FileWriter(configFile)) {
					defaults.store(writer, "");
				} catch (IOException e) {
					logger.warn("Could not write config file to " + configFile);
				}
			}
		}
	}

	/**
	 * Loads properties from the given file into the current properties object.
	 *
	 * @return true if the file was read successfully, false otherwise
	 */
	private static boolean loadConfigFile(String fileName) {
		// BUG FIX: the FileReader was previously never closed (resource leak);
		// try-with-resources closes it on both the success and failure paths.
		try (FileReader reader = new FileReader(fileName)) {
			properties.load(reader);
			logger.info("Successfully read config from file " + fileName);
			return true;
		} catch (IOException e) {
			logger.warn("Could not load Settings from " + fileName);
			return false;
		}
	}
}
package MWC.GenericData;

import java.io.Serializable;

/**
 * A rectangular area on the earth's surface, stored as its four corners plus
 * a lazily-cached centre. After {@link #normalise()} the top-left corner
 * holds the maximum latitude, minimum longitude and maximum depth; the
 * bottom-right corner holds the opposites.
 */
public final class WorldArea implements Serializable
{
  // member variables

  // keep track of versions
  static final long serialVersionUID = 1;

  /**
   * BottomRight (South-East) corner
   */
  private WorldLocation _bottomRight;

  /**
   * TopLeft (North-West) corner
   */
  private WorldLocation _topLeft;

  /**
   * top right corner
   */
  private WorldLocation _topRight;

  /**
   * bottom left corner
   */
  private WorldLocation _bottomLeft;

  /**
   * use internal WorldLocation for centre, to stop us having to create a new
   * one so frequently
   */
  private WorldLocation _thisCentre = null;

  // constructor

  /**
   * Constructor, takes the two coordinates. They don't really have to be the
   * right way round, since we do a normalise anyway.
   *
   * @see #normalise
   */
  public WorldArea(final WorldLocation TopLeftVal,
      final WorldLocation BottomRightVal)
  {
    // remember to normalise
    _topLeft = new WorldLocation(TopLeftVal);
    _bottomRight = new WorldLocation(BottomRightVal);
    _topRight = new WorldLocation(_topLeft);
    _bottomLeft = new WorldLocation(_bottomRight);
    normalise();
  }

  /**
   * other constructor, takes copy of coordinates in target area
   */
  public WorldArea(final WorldArea other)
  {
    this(other._topLeft, other._bottomRight);
  }

  // member functions

  /**
   * function to determine the centre of the area
   *
   * @return WorldLocation - the centre of the Area
   */
  final public WorldLocation getCentre()
  {
    final double dLat = getHeight() / 2.0;
    final double dLong = (_bottomRight.getLong() - _topLeft.getLong()) / 2.0;
    final double dDepth = getDepthRange() / 2.0;

    // lazily create the cached centre object
    if (_thisCentre == null)
    {
      _thisCentre = new WorldLocation(0, 0, 0);
    }

    // update properties of centre
    _thisCentre.setLat(_bottomRight.getLat() + dLat);
    _thisCentre.setLong(_topLeft.getLong() + dLong);
    _thisCentre.setDepth(_bottomRight.getDepth() + dDepth);

    return _thisCentre;
  }

  /**
   * function to shift the centre of the area
   *
   * @param newCentre the new centre of the area
   */
  final public void setCentre(final WorldLocation newCentre)
  {
    // what's the area
    final double wid = this.getFlatEarthWidth();
    final double ht = this.getHeight();
    final double depth = this.getCentre().getDepth();

    final WorldLocation newTL = new WorldLocation(newCentre.getLat() + ht / 2,
        newCentre.getLong() - wid / 2, depth);
    final WorldLocation newBR = new WorldLocation(newCentre.getLat() - ht / 2,
        newCentre.getLong() + wid / 2, depth);

    _topLeft = newTL;
    _bottomRight = newBR;

    // invalidate the cached centre; it will be rebuilt on demand
    _thisCentre = null;
  }

  /**
   * function to return the centre of the area (at zero depth)
   *
   * @return WorldLocation the zero-depth centre of the area
   */
  final public WorldLocation getCentreAtSurface()
  {
    final WorldLocation res = getCentre();
    res.setDepth(0);
    return res;
  }

  final public WorldLocation getTopLeft()
  {
    return _topLeft;
  }

  final public WorldLocation getBottomRight()
  {
    return _bottomRight;
  }

  final public void setBottomRight(final WorldLocation loc)
  {
    _bottomRight = loc;
  }

  final public void setTopLeft(final WorldLocation loc)
  {
    _topLeft = loc;
  }

  final public WorldLocation getTopRight()
  {
    return _topRight;
  }

  final public WorldLocation getBottomLeft()
  {
    return _bottomLeft;
  }

  final public String toString()
  {
    String res = "";
    res += " Area TL:" + _topLeft + " BR:" + _bottomRight + " ("
        + _topLeft.getLong() + ") ";
    return res;
  }

  /**
   * see if the WorldLocation is in this area. Note: the "top" of the area
   * holds the maximum depth, so the depth test mirrors the lat test.
   *
   * @return flag for contains
   */
  final public boolean contains(final WorldLocation other)
  {
    boolean res = true;
    if (this._topLeft._theLat < other._theLat)
      res = false;
    else if (this._topLeft._theLong > other._theLong)
      res = false;
    else if (this._bottomRight._theLat > other._theLat)
      res = false;
    else if (this._bottomRight._theLong < other._theLong)
      res = false;
    else if (this._topLeft._theDepth < other._theDepth)
      res = false;
    else if (this._bottomRight._theDepth > other._theDepth)
      res = false;
    return res;
  }

  /**
   * find the range of the nearest corner (or the centre point) to the
   * indicated point
   */
  final public double rangeFrom(final WorldLocation other)
  {
    // first the TL/BR corners
    final double r1 = getTopLeft().rangeFrom(other);
    final double r2 = getBottomRight().rangeFrom(other);

    // now the centre
    final double r3 = getCentre().rangeFrom(other);

    // now the TR / BL corners
    final double r4 = getTopRight().rangeFrom(other);
    final double r5 = getBottomLeft().rangeFrom(other);

    // sort out the min
    double res = Math.min(r1, r2);
    res = Math.min(res, r3);
    res = Math.min(res, r4);
    res = Math.min(res, r5);

    return res;
  }

  /**
   * find the range to the nearest edge of the area (zero when the point lies
   * inside the area)
   */
  final public double rangeFromEdge(final WorldLocation other)
  {
    double res;

    if (this.contains(other))
      res = 0;
    else
    {
      // distance to each of the four edges
      final double r1 = distancePointLine(getTopLeft(), getTopRight(), other);
      final double r2 = distancePointLine(getTopRight(), getBottomRight(), other);
      final double r3 = distancePointLine(getBottomRight(), getBottomLeft(), other);
      final double r4 = distancePointLine(getBottomLeft(), getTopLeft(), other);

      // sort out the min
      res = Math.min(r1, r2);
      res = Math.min(res, r3);
      res = Math.min(res, r4);
    }

    return res;
  }

  /**
   * Computes the distance from a point p to a line segment AB. This is a
   * modified version of the sources from the Java Topology Suite.
   *
   * Note: NON-ROBUST!
   *
   * @param p the point to compute the distance for
   * @param A one point of the line
   * @param B another point of the line (must be different to A)
   * @return the distance from p to line segment AB
   */
  private static double distancePointLine(final WorldLocation A,
      final WorldLocation B, final WorldLocation p)
  {
    // if start==end, then use pt distance
    if (A.equals(B))
      return p.rangeFrom(A);

    // otherwise use comp.graphics.algorithms Frequently Asked Questions method
    /** NOTE: we use getLat for .y and getLong for .x */
    final double r = ((p.getLong() - A.getLong()) * (B.getLong() - A.getLong())
        + (p.getLat() - A.getLat()) * (B.getLat() - A.getLat()))
        / ((B.getLong() - A.getLong()) * (B.getLong() - A.getLong())
            + (B.getLat() - A.getLat()) * (B.getLat() - A.getLat()));

    // projection falls before A or after B: nearest point is the endpoint
    if (r <= 0.0)
      return p.rangeFrom(A);
    if (r >= 1.0)
      return p.rangeFrom(B);

    final double s = ((A.getLat() - p.getLat()) * (B.getLong() - A.getLong())
        - (A.getLong() - p.getLong()) * (B.getLat() - A.getLat()))
        / ((B.getLong() - A.getLong()) * (B.getLong() - A.getLong())
            + (B.getLat() - A.getLat()) * (B.getLat() - A.getLat()));

    return Math.abs(s)
        * Math.sqrt(((B.getLong() - A.getLong()) * (B.getLong() - A.getLong())
            + (B.getLat() - A.getLat()) * (B.getLat() - A.getLat())));
  }

  /**
   * see if the two areas overlap (depth is deliberately not considered)
   *
   * @return flag for overlap
   */
  final public boolean overlaps(final WorldArea other)
  {
    boolean res = true;

    // see if the bottom left/top right overlap
    if (other._bottomRight._theLat > this._topLeft._theLat)
      res = false;
    else if (other._topLeft._theLat < this._bottomRight._theLat)
      res = false;
    else if (other._bottomRight._theLong < this._topLeft._theLong)
      res = false;
    else if (other._topLeft._theLong > this._bottomRight._theLong)
      res = false;

    return res;
  }

  /**
   * grow the area to include a border of the indicated degrees (and the
   * indicated depth in metres)
   */
  final public void grow(final double border_degs, final double depth_metres)
  {
    _topLeft.setLat(_topLeft.getLat() + border_degs);
    _topLeft.setLong(_topLeft.getLong() - border_degs);
    _topLeft.setDepth(_topLeft.getDepth() + depth_metres);
    _bottomRight.setLat(_bottomRight.getLat() - border_degs);
    _bottomRight.setLong(_bottomRight.getLong() + border_degs);
    _bottomRight.setDepth(_bottomRight.getDepth() - depth_metres);
  }

  /**
   * extend the area to include this new point. includes a normalise operation
   */
  final public void extend(final WorldLocation newPoint)
  {
    // check there is a valid point
    if (newPoint == null)
      return;

    if (this.contains(newPoint))
      return;

    // set the limits by hand
    // first the areas
    _topLeft._theLat = Math.max(_topLeft._theLat, newPoint._theLat);
    _topLeft._theLong = Math.min(_topLeft._theLong, newPoint._theLong);
    _bottomRight._theLat = Math.min(_bottomRight._theLat, newPoint._theLat);
    _bottomRight._theLong = Math.max(_bottomRight._theLong, newPoint._theLong);
    _topRight._theLat = Math.max(_topRight._theLat, newPoint._theLat);
    _topRight._theLong = Math.max(_topRight._theLong, newPoint._theLong);
    _bottomLeft._theLat = Math.min(_bottomLeft._theLat, newPoint._theLat);
    _bottomLeft._theLong = Math.min(_bottomLeft._theLong, newPoint._theLong);

    // now the depths - if this point has a valid depth
    if (newPoint.hasValidDepth())
    {
      _topLeft._theDepth = Math.max(_topLeft._theDepth, newPoint._theDepth);
      _bottomRight._theDepth = Math.min(_bottomRight._theDepth,
          newPoint._theDepth);
      // BUG FIX: the next two lines previously read from _topLeft /
      // _bottomRight instead of the corner being updated (a copy/paste
      // slip). Under the normalise() invariant (topRight depth == topLeft
      // depth, bottomLeft depth == bottomRight depth) the result was the
      // same, but the corrected form is also safe when that invariant has
      // been broken by direct corner mutation.
      _topRight._theDepth = Math.max(_topRight._theDepth, newPoint._theDepth);
      _bottomLeft._theDepth = Math.min(_bottomLeft._theDepth,
          newPoint._theDepth);
    }
  }

  /**
   * extend the area to include this new area. Includes a normalise operation
   */
  final public void extend(final WorldArea newArea)
  {
    // check we've received a valid area
    if (newArea == null)
      return;

    // extend for each corner of the incoming area
    extend(newArea._topLeft);
    extend(newArea._bottomRight);
  }

  /**
   * make sure the corners are the correct way around.
   */
  final public void normalise()
  {
    double maxLat, maxLong, maxDepth;
    double minLat, minLong, minDepth;

    // working variables
    if (_topLeft._theLat > _bottomRight._theLat)
    {
      maxLat = _topLeft._theLat;
      minLat = _bottomRight._theLat;
    }
    else
    {
      maxLat = _bottomRight._theLat;
      minLat = _topLeft._theLat;
    }

    if (_topLeft._theLong > _bottomRight._theLong)
    {
      maxLong = _topLeft._theLong;
      minLong = _bottomRight._theLong;
    }
    else
    {
      maxLong = _bottomRight._theLong;
      minLong = _topLeft._theLong;
    }

    if (_topLeft._theDepth > _bottomRight._theDepth)
    {
      maxDepth = _topLeft._theDepth;
      minDepth = _bottomRight._theDepth;
    }
    else
    {
      maxDepth = _bottomRight._theDepth;
      minDepth = _topLeft._theDepth;
    }

    // update the values
    _topLeft._theLat = maxLat;
    _topLeft._theLong = minLong;
    _topLeft._theDepth = maxDepth;
    _bottomRight._theLat = minLat;
    _bottomRight._theLong = maxLong;
    _bottomRight._theDepth = minDepth;
    _topRight._theLat = maxLat;
    _topRight._theLong = maxLong;
    _topRight._theDepth = maxDepth;
    _bottomLeft._theLat = minLat;
    _bottomLeft._theLong = minLong;
    _bottomLeft._theDepth = minDepth;
  }

  final public boolean equals(final Object tt)
  {
    if (!(tt instanceof WorldArea))
      return false;

    final WorldArea o = (WorldArea) tt;
    return o.getTopLeft().equals(getTopLeft())
        && o.getBottomRight().equals(getBottomRight());
  }

  /**
   * BUG FIX: equals() was previously overridden without hashCode(), breaking
   * the equals/hashCode contract for hash-based collections. This hashes the
   * same two corners that equals() compares. NOTE(review): correctness also
   * requires WorldLocation to override hashCode() consistently with its
   * equals() - confirm in WorldLocation.
   */
  public int hashCode()
  {
    return 31 * _topLeft.hashCode() + _bottomRight.hashCode();
  }

  /**
   * get the width of this area in absolute degrees - not degrees at the
   * equator
   *
   * @return width in degrees at this point of latitide
   */
  final public double getFlatEarthWidth()
  {
    return _bottomRight.getLong() - _topLeft.getLong();
  }

  /**
   * return the width of the area (in degrees)
   */
  final public double getWidth()
  {
    /**
     * note we don't just return the value in degrees, we switch the width of
     * this area to give a value in degrees at the equator
     */
    // determine the mid-latitude
    double midLat = getHeight() / 2.0;
    midLat = _bottomRight.getLat() + midLat;

    // create the points we are going to work with
    final WorldLocation leftPoint = new WorldLocation(midLat,
        _topLeft.getLong(), 0.0);
    final WorldLocation rightPoint = new WorldLocation(midLat,
        _bottomRight.getLong(), 0.0);

    // and calculate the range
    final double res = leftPoint.rangeFrom(rightPoint);
    return res;
  }

  /**
   * return the height of the area (in degrees)
   */
  final public double getHeight()
  {
    return _topLeft.getLat() - _bottomRight.getLat();
  }

  final public double getDepthRange()
  {
    return _topLeft.getDepth() - _bottomRight.getDepth();
  }

  /**
   * convenience method used to produce a distribution of locations within
   * this area. Using the total number of locations, this method will use the
   * counter to calculate the location of this point
   *
   * @param counter how far we are through the distribution
   * @param total   the total number we are going to produce
   * @return this location in the distribution
   */
  final public WorldLocation getDistributedLocation(final int counter,
      final int total)
  {
    WorldLocation res = null;

    // what's our area in metres?
    final double widthMetres = MWC.Algorithms.Conversions.Degs2m(this
        .getWidth());
    final double heightMetres = MWC.Algorithms.Conversions.Degs2m(this
        .getHeight());
    final double areaMetres = widthMetres * heightMetres;

    // what's the area of each cell
    final double cellArea = areaMetres / total;

    // and what's their width?
    final double spacing = Math.sqrt(cellArea);

    // ok, how many will fit across is this?
    final int acrossSpacing = (int) (widthMetres / spacing);

    // divide counter by this
    int numDown, numAcross;
    if (acrossSpacing > 0)
    {
      numDown = counter / acrossSpacing;
      numAcross = counter - (numDown * acrossSpacing);
    }
    else
    {
      numDown = counter;
      numAcross = 1;
    }

    // and calculate the point
    final double latOffset = MWC.Algorithms.Conversions.m2Degs(numDown
        * spacing);
    final double longOffset = MWC.Algorithms.Conversions.m2Degs(numAcross
        * spacing);

    // now add this to the bottom-left of the area
    res = new WorldLocation(getBottomLeft().getLat() + latOffset,
        getBottomLeft().getLong() + longOffset, getBottomLeft().getDepth());

    return res;
  }

  // testing for this class
  static public final class WorldAreaTest extends junit.framework.TestCase
  {
    static public final String TEST_ALL_TEST_TYPE = "UNIT";

    WorldLocation w1;
    WorldLocation w2;
    WorldLocation w3;
    WorldLocation w4;
    WorldLocation w5;
    WorldArea wa1;
    WorldArea wa2;
    WorldArea wa3;
    WorldArea wa4;
    private WorldArea wa5;

    public WorldAreaTest(final String val)
    {
      super(val);
    }

    public final void setUp()
    {
      // set the earth model we are expecting
      MWC.GenericData.WorldLocation
          .setModel(new MWC.Algorithms.EarthModels.FlatEarth());
      w1 = new WorldLocation(12, 12, 0);
      w2 = new WorldLocation(10, 10, 100);
      w3 = new WorldLocation(9, 9, 110);
      w4 = new WorldLocation(58, 12, 0);
      w5 = new WorldLocation(62, 10, 100);
      wa1 = new WorldArea(w1, w2);
      wa2 = new WorldArea(w1, w2);
      wa3 = new WorldArea(w1, w3);
      wa4 = new WorldArea(w4, w5);
      wa5 = new WorldArea(w2, w5);
    }

    public final void tearDown()
    {
      w1 = null;
      w2 = null;
      w3 = null;
      wa1 = null;
      wa2 = null;
      wa3 = null;
    }

    public final void testRangeFromEdge()
    {
      double dist1 = wa1.rangeFromEdge(new WorldLocation(13, 11, 0));
      assertEquals("correct range", 1d, dist1, 0.01);
      dist1 = wa1.rangeFromEdge(new WorldLocation(11, 11, 0));
      assertEquals("correct range", 0d, dist1, 0.01);
      dist1 = wa1.rangeFromEdge(new WorldLocation(9, 10.4, 0));
      assertEquals("correct range", 1d, dist1, 0.01);
      dist1 = wa1.rangeFromEdge(new WorldLocation(9, 11, 0));
      assertEquals("correct range", 1d, dist1, 0.01);
      dist1 = wa1.rangeFromEdge(new WorldLocation(11, 13, 0));
      assertEquals("correct range", 1d, dist1, 0.02);
      dist1 = wa1.rangeFromEdge(new WorldLocation(11.2, 13, 0));
      assertEquals("correct range", 1d, dist1, 0.02);
      dist1 = wa1.rangeFromEdge(new WorldLocation(11, 9, 0));
      assertEquals("correct range", 1d, dist1, 0.02);
      dist1 = wa5.rangeFromEdge(new WorldLocation(10, 11, 100));
      assertEquals("correct range", 1d, dist1, 0.02);
    }

    public final void testDiffRangeFromEdge()
    {
      final WorldArea was = new WorldArea(new WorldLocation(0.5, 4.05, 0),
          new WorldLocation(0, 4.05, 0));
      final WorldLocation tgt = new WorldLocation(0.127, 0.5, 0);
      final double dist = was.rangeFrom(tgt);
      assertTrue(dist > 1);
    }

    public final void testConstructor()
    {
      final WorldArea ww1 = new WorldArea(w1, w2);
      final WorldArea ww2 = new WorldArea(wa1);
      assertEquals("constructor worked", ww1.equals(wa1), true);
      assertEquals("constructor worked", ww2.equals(wa1), true);
    }

    public final void testContains()
    {
      // assuming the area is TL: 12, 9, 110 and BR: 9, 12, 0
      final WorldLocation offLeft = new WorldLocation(10, 8, 0);
      final WorldLocation offRight = new WorldLocation(10, 13, 0);
      final WorldLocation offTop = new WorldLocation(14, 9, 0);
      final WorldLocation offBottom = new WorldLocation(7, 9, 0);
      final WorldLocation tooShallow = new WorldLocation(9, 9, -5);
      final WorldLocation tooDeep = new WorldLocation(9, 9, 200);
      assertTrue("w1 in wa3", wa3.contains(w1));
      assertTrue("w2 in wa3", wa3.contains(w2));
      assertTrue("w3 in wa3", wa3.contains(w3));
      assertTrue("offLeft outside wa2", !wa2.contains(offLeft));
      assertTrue("offRight outside wa2", !wa2.contains(offRight));
      assertTrue("offTop outside wa2", !wa2.contains(offTop));
      assertTrue("offBottom outside wa2", !wa2.contains(offBottom));
      assertTrue("tooShallow outside wa2", !wa2.contains(tooShallow));
      assertTrue("tooDeep outside wa2", !wa2.contains(tooDeep));
    }

    public final void testEquals()
    {
      final WorldArea ww = new WorldArea(w2, w1);
      assertTrue("Identical areas", ww.equals(wa1));
      final WorldArea ww2 = new WorldArea(w3, w1);
      assertTrue("Different areas", !ww2.equals(wa1));
    }

    public final void testExtend()
    {
      final WorldArea ww4 = new WorldArea(wa1);
      final WorldArea ww5 = new WorldArea(wa1);
      ww4.extend(w3);
      assertTrue("Extending using location", ww4.equals(wa3));
      ww5.extend(wa3);
      assertTrue("Extending using area", ww5.equals(wa3));
    }

    public final void testAreaCalcs()
    {
      w4 = new WorldLocation(58, 12, 0);
      w5 = new WorldLocation(62, 10, 100);
      wa4 = new WorldArea(w4, w5);
      assertTrue(wa4.getBottomRight().equals(new WorldLocation(58, 12, 0)));
      assertTrue(wa4.getTopLeft().equals(new WorldLocation(62, 10, 100)));
      assertTrue(wa4.getTopRight().equals(new WorldLocation(62, 12, 100)));
      assertTrue(wa4.getBottomLeft().equals(new WorldLocation(58, 10, 0)));
      final WorldLocation first = wa4.getCentre();
      final WorldLocation other = new WorldLocation(60, 11, 50);
      assertTrue(first.equals(other));
      assertEquals("Check depth range of area", wa4.getDepthRange(), 100, 0d);
      assertEquals("Check height of area", wa4.getHeight(), 4.0, 0d);
      assertEquals("Check width of area", wa4.getWidth(), 1.0, 0d);
    }

    public final void testMissingDepthData()
    {
      w4 = new WorldLocation(58, 12, 0);
      w5 = new WorldLocation(62, 10, 100);
      final WorldLocation w6 = new WorldLocation(64, 9, Double.NaN);
      wa4 = new WorldArea(w4, w5);

      // ok, try to extend it
      assertEquals("shallow depth valid", wa4.getBottomRight().getDepth(), 0d,
          0.1d);
      assertEquals("deep depth valid", wa4.getTopLeft().getDepth(), 100d, 0.1d);

      wa4.extend(w6);

      // ok, try to extend it
      assertEquals("shallow depth valid", wa4.getBottomRight().getDepth(), 0d,
          0.1d);
      assertEquals("deep depth valid", wa4.getTopLeft().getDepth(), 100d, 0.1d);
      assertEquals("lat updated", wa4.getTopLeft().getLat(), 64d, 0.1d);
      assertEquals("lat updated", wa4.getBottomRight().getLat(), 58, 0.1d);
      assertEquals("long updated", wa4.getTopLeft().getLong(), 9d, 0.1d);
      assertEquals("long updated", wa4.getBottomRight().getLong(), 12d, 0.1d);
    }

    public final void testNormalise()
    {
      final WorldArea ww1 = new WorldArea(w1, w3);
      final WorldArea ww2 = new WorldArea(w3, w1);
      assertTrue("Checking normalise", ww1.equals(ww2));
    }

    public final void testOverlap()
    {
      final WorldLocation aa = new WorldLocation(4, 3, 0);
      final WorldLocation ab = new WorldLocation(2, 5, 0);
      final WorldLocation a1a = new WorldLocation(6, 1, 0);
      final WorldLocation a1b = new WorldLocation(4, 2.9, 0);
      final WorldLocation a2a = new WorldLocation(6, 5.1, 0);
      final WorldLocation a2b = new WorldLocation(4, 7, 0);
      final WorldLocation a3a = new WorldLocation(5, 2, 0);
      final WorldLocation a3b = new WorldLocation(3, 4, 0);
      final WorldLocation a4a = new WorldLocation(5, 4, 0);
      final WorldLocation a4b = new WorldLocation(3, 6, 0);
      final WorldLocation a5a = new WorldLocation(3.5, 3.5, 0);
      final WorldLocation a5b = new WorldLocation(4.5, 4.5, 0);
      final WorldLocation a6a = new WorldLocation(3, 2, 0);
      final WorldLocation a6b = new WorldLocation(1, 4, 0);
      final WorldLocation a7a = new WorldLocation(3, 4, 0);
      final WorldLocation a7b = new WorldLocation(1, 6, 0);
      final WorldLocation a8a = new WorldLocation(2, 1, 0);
      final WorldLocation a8b = new WorldLocation(0, 2.9, 0);
      final WorldLocation a9a = new WorldLocation(2, 5.1, 0);
      final WorldLocation a9b = new WorldLocation(0, 7, 0);
      final WorldArea a1 = new WorldArea(a1a, a1b);
      final WorldArea a2 = new WorldArea(a2a, a2b);
      final WorldArea a3 = new WorldArea(a3a, a3b);
      final WorldArea a4 = new WorldArea(a4a, a4b);
      final WorldArea a5 = new WorldArea(a5a, a5b);
      final WorldArea a6 = new WorldArea(a6a, a6b);
      final WorldArea a7 = new WorldArea(a7a, a7b);
      final WorldArea a8 = new WorldArea(a8a, a8b);
      final WorldArea a9 = new WorldArea(a9a, a9b);
      final WorldArea waa = new WorldArea(aa, ab);
      assertTrue("a1", !waa.overlaps(a1));
      assertTrue("a2", !waa.overlaps(a2));
      assertTrue("a3", waa.overlaps(a3));
      assertTrue("a4", waa.overlaps(a4));
      assertTrue("a5", waa.overlaps(a5));
      assertTrue("a6", waa.overlaps(a6));
      assertTrue("a7", waa.overlaps(a7));
      assertTrue("a8", !waa.overlaps(a8));
      assertTrue("a9", !waa.overlaps(a9));
    }

    public final void testRangeFrom()
    {
      final WorldLocation a1a = new WorldLocation(6, 1, 0);
      final WorldLocation a1b = new WorldLocation(4, 3, 0);
      final WorldLocation a2a = new WorldLocation(6, 5, 0);
      final WorldLocation a2b = new WorldLocation(4, 7, 0);
      final WorldArea a1 = new WorldArea(a1a, a1b);
      final double r1 = a1.rangeFrom(a2a);
      final double r2 = a1.rangeFrom(a2b);
      final double r3 = a2a.rangeFrom(new WorldLocation(6, 3, 0));
      final double r4 = new WorldLocation(4, 3, 0d).rangeFrom(a2b);
      assertEquals("Checking range A from", r3, r1, 0d);
      assertEquals("Checking range B from", r4, r2, 0d);
    }

    public final void testRangeFrom2()
    {
      final WorldLocation a1a = new WorldLocation(0, 0, 0);
      final WorldLocation a1b = new WorldLocation(8, 4, 0);
      final WorldLocation a2a = new WorldLocation(0, 2, 0);
      final WorldLocation a2b = new WorldLocation(8, 2, 0);
      final WorldArea a1 = new WorldArea(a1a, a1b);
      final double r1 = a1.rangeFrom(a2a);
      final double r2 = a1.rangeFrom(a2b);
      assertEquals("Checking range A from", 2, r1, 0d);
      assertEquals("Checking range B from", 2, r2, 0.1d);
    }

    public final void testGrow()
    {
      final WorldLocation a1a = new WorldLocation(6, 1, 0);
      final WorldLocation a1b = new WorldLocation(4, 3, 0);
      final WorldLocation newTL = new WorldLocation(7, 0, 100);
      final WorldLocation newBR = new WorldLocation(3, 4, -100);
      final WorldArea a1 = new WorldArea(a1a, a1b);
      a1.grow(1, 100);
      assertEquals("Checking new top left", newTL, a1.getTopLeft());
      assertEquals("Checking new bottom right", newBR, a1.getBottomRight());
    }

    public void testChangeCentre()
    {
      final WorldLocation wa = new WorldLocation(15, 13, 0);
      final WorldLocation wb = new WorldLocation(13, 15, 0);
      final WorldLocation wcenter = new WorldLocation(14, 14, 0);
      final WorldLocation wcenter_b = new WorldLocation(1, 1, 0);
      final WorldArea w_a = new WorldArea(wa, wb);

      // check the centre
      assertEquals("original centre is right", w_a.getCentre(), wcenter);

      // shift it
      w_a.setCentre(wcenter_b);

      // check the centre
      assertEquals("new centre is right", w_a.getCentre(), wcenter_b);

      // shift it
      w_a.setCentre(wcenter);

      // check the centre
      assertEquals("new centre is right", w_a.getCentre(), wcenter);

      // and the corners
      assertEquals("new TL", wa.getLat(), w_a.getTopLeft().getLat(), 0.03);
      assertEquals("new TL", wa.getLong(), w_a.getTopLeft().getLong(), 0.03);
      assertEquals("new BR", wb.getLat(), w_a.getBottomRight().getLat(), 0.03);
      assertEquals("new BR", wb.getLong(), w_a.getBottomRight().getLong(), 0.03);

      final WorldLocation currentTL = new WorldLocation(w_a.getTopLeft());

      // go through the cycle once again
      // shift it
      w_a.setCentre(wcenter_b);

      // check the centre
      assertEquals("new centre is right", w_a.getCentre(), wcenter_b);

      // shift it
      w_a.setCentre(wcenter);

      // check the centre
      assertEquals("new centre is right", w_a.getCentre(), wcenter);

      // and the updated top-left
      assertEquals("tl same as last time", currentTL, w_a.getTopLeft());
    }

    public void testDistribution()
    {
      final WorldArea theArea = new WorldArea(new WorldLocation(2, 2, 0),
          new WorldLocation(3, 3, 0));
      final WorldLocation first = theArea.getDistributedLocation(0, 10);
      assertEquals("first point correct", 2, first.getLat(), 0);
      assertEquals("first point correct", 2, first.getLong(), 0);
    }
  }

  /**
   * generate a random location, uniformly distributed within this area
   *
   * @return a fresh location inside the area
   */
  public WorldLocation getRandomLocation()
  {
    double _lat, _long, _depth;
    _lat = _bottomLeft.getLat() + Math.random() * getHeight();
    _long = _bottomLeft.getLong() + Math.random() * getWidth();
    _depth = _bottomLeft.getDepth() + Math.random() * getDepthRange();
    return new WorldLocation(_lat, _long, _depth);
  }

  // clip the location to valid lat/long limits
  private void trimLocation(final WorldLocation loc)
  {
    loc.setLat(Math.min(loc.getLat(), 90));
    loc.setLat(Math.max(loc.getLat(), -90));
    loc.setLong(Math.min(loc.getLong(), 180));
    loc.setLong(Math.max(loc.getLong(), -180));
  }

  /** clip the area to valid lat/long limits */
  public void trim()
  {
    // do it, one corner at a time
    trimLocation(_topLeft);
    trimLocation(_bottomRight);

    // and sort out the other corners
    normalise();
  }
}
package net.sourceforge.jtds.test;

import java.sql.*;

/**
 * Tests of <code>ResultSet</code> type conversion (typed getters and
 * <code>getObject()</code>) for the integer-valued server data types,
 * plus scrollable result set behaviour.
 *
 * @version 1.0
 */
public class ResultSetTest extends TestBase {
    public ResultSetTest(String name) {
        super(name);
    }

    /**
     * Asserts that every numeric and textual getter of column 1 of the
     * current row yields the value 1.  All server types under test store
     * the value 1, so every converted view of the column must agree.
     *
     * @param rs result set positioned on a row whose column 1 holds 1
     * @throws SQLException if a getter fails
     */
    private void assertColumnOneIsOne(ResultSet rs) throws SQLException {
        assertTrue(rs.getBoolean(1));
        assertTrue(rs.getByte(1) == 1);
        assertTrue(rs.getShort(1) == 1);
        assertTrue(rs.getInt(1) == 1);
        assertTrue(rs.getLong(1) == 1);
        assertTrue(rs.getFloat(1) == 1);
        assertTrue(rs.getDouble(1) == 1);
        assertTrue(rs.getBigDecimal(1).intValue() == 1);
        assertTrue("1".equals(rs.getString(1)));
    }

    /**
     * Test BIT data type.
     */
    public void testGetObject1() throws Exception {
        boolean data = true;

        Statement stmt = con.createStatement();
        stmt.execute("CREATE TABLE #getObject1 (data BIT)");
        stmt.close();

        PreparedStatement pstmt = con.prepareStatement(
                "INSERT INTO #getObject1 (data) VALUES (?)");
        pstmt.setBoolean(1, data);
        assertTrue(pstmt.executeUpdate() == 1);
        pstmt.close();

        Statement stmt2 = con.createStatement();
        ResultSet rs = stmt2.executeQuery("SELECT data FROM #getObject1");

        assertTrue(rs.next());
        assertTrue(data == rs.getBoolean(1));
        assertColumnOneIsOne(rs);

        // BIT must surface as java.lang.Boolean through getObject()
        Object tmpData = rs.getObject(1);
        assertTrue(tmpData instanceof Boolean);
        assertTrue(data == ((Boolean) tmpData).booleanValue());

        assertTrue(!rs.next());

        // close the result set BEFORE its statement; the original closed
        // the statement first, which implicitly closes the result set,
        // making the subsequent rs.close() a no-op on a dead object
        rs.close();
        stmt2.close();
    }

    /**
     * Test TINYINT data type.
     */
    public void testGetObject2() throws Exception {
        byte data = 1;

        Statement stmt = con.createStatement();
        stmt.execute("CREATE TABLE #getObject2 (data TINYINT)");
        stmt.close();

        PreparedStatement pstmt = con.prepareStatement(
                "INSERT INTO #getObject2 (data) VALUES (?)");
        pstmt.setByte(1, data);
        assertTrue(pstmt.executeUpdate() == 1);
        pstmt.close();

        Statement stmt2 = con.createStatement();
        ResultSet rs = stmt2.executeQuery("SELECT data FROM #getObject2");

        assertTrue(rs.next());
        assertColumnOneIsOne(rs);

        // TINYINT must surface as java.lang.Byte through getObject()
        Object tmpData = rs.getObject(1);
        assertTrue(tmpData instanceof Byte);
        assertTrue(data == ((Byte) tmpData).byteValue());

        assertTrue(!rs.next());

        rs.close();
        stmt2.close();
    }

    /**
     * Test SMALLINT data type.
     */
    public void testGetObject3() throws Exception {
        short data = 1;

        Statement stmt = con.createStatement();
        stmt.execute("CREATE TABLE #getObject3 (data SMALLINT)");
        stmt.close();

        PreparedStatement pstmt = con.prepareStatement(
                "INSERT INTO #getObject3 (data) VALUES (?)");
        pstmt.setShort(1, data);
        assertTrue(pstmt.executeUpdate() == 1);
        pstmt.close();

        Statement stmt2 = con.createStatement();
        ResultSet rs = stmt2.executeQuery("SELECT data FROM #getObject3");

        assertTrue(rs.next());
        assertColumnOneIsOne(rs);

        // SMALLINT must surface as java.lang.Short through getObject()
        Object tmpData = rs.getObject(1);
        assertTrue(tmpData instanceof Short);
        assertTrue(data == ((Short) tmpData).shortValue());

        assertTrue(!rs.next());

        rs.close();
        stmt2.close();
    }

    /**
     * Test INT data type.
     */
    public void testGetObject4() throws Exception {
        int data = 1;

        Statement stmt = con.createStatement();
        stmt.execute("CREATE TABLE #getObject4 (data INT)");
        stmt.close();

        PreparedStatement pstmt = con.prepareStatement(
                "INSERT INTO #getObject4 (data) VALUES (?)");
        pstmt.setInt(1, data);
        assertTrue(pstmt.executeUpdate() == 1);
        pstmt.close();

        Statement stmt2 = con.createStatement();
        ResultSet rs = stmt2.executeQuery("SELECT data FROM #getObject4");

        assertTrue(rs.next());
        assertColumnOneIsOne(rs);

        // INT must surface as java.lang.Integer through getObject()
        Object tmpData = rs.getObject(1);
        assertTrue(tmpData instanceof Integer);
        assertTrue(data == ((Integer) tmpData).intValue());

        assertTrue(!rs.next());

        rs.close();
        stmt2.close();
    }

    /**
     * Test BIGINT data type.
     */
    public void testGetObject5() throws Exception {
        // BIGINT is only supported natively on TDS 8 connections
        if (!props.getProperty("TDS", "7.0").equals("8.0")) {
            System.out.println("testGetObject5() requires TDS 8");
            return;
        }

        long data = 1;

        Statement stmt = con.createStatement();
        stmt.execute("CREATE TABLE #getObject5 (data BIGINT)");
        stmt.close();

        PreparedStatement pstmt = con.prepareStatement(
                "INSERT INTO #getObject5 (data) VALUES (?)");
        pstmt.setLong(1, data);
        assertTrue(pstmt.executeUpdate() == 1);
        pstmt.close();

        Statement stmt2 = con.createStatement();
        ResultSet rs = stmt2.executeQuery("SELECT data FROM #getObject5");

        assertTrue(rs.next());
        assertColumnOneIsOne(rs);

        // BIGINT must surface as java.lang.Long through getObject()
        Object tmpData = rs.getObject(1);
        assertTrue(tmpData instanceof Long);
        assertTrue(data == ((Long) tmpData).longValue());

        assertTrue(!rs.next());

        rs.close();
        stmt2.close();
    }

    /**
     * Test for bug [961594] ResultSet.
     */
    public void testResultSetScroll1() throws Exception {
        int count = 125;

        Statement stmt = con.createStatement();
        stmt.execute("CREATE TABLE #resultSetScroll1 (data INT)");
        stmt.close();

        PreparedStatement pstmt = con.prepareStatement(
                "INSERT INTO #resultSetScroll1 (data) VALUES (?)");
        for (int i = 1; i <= count; i++) {
            pstmt.setInt(1, i);
            assertTrue(pstmt.executeUpdate() == 1);
        }
        pstmt.close();

        Statement stmt2 = con.createStatement(
                ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        ResultSet rs = stmt2.executeQuery("SELECT data FROM #resultSetScroll1");

        // last() must position on the final row of a scrollable result set
        assertTrue(rs.last());
        assertTrue(rs.getRow() == count);

        rs.close();
        stmt2.close();
    }

    public static void main(String[] args) {
        junit.textui.TestRunner.run(ResultSetTest.class);
    }
}
package com.jcwhatever.bukkit.generic.utils;

import com.jcwhatever.bukkit.generic.utils.text.TextUtils;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;

import javax.annotation.Nullable;

/**
 * File handling utilities.
 */
public final class FileUtils {

    private FileUtils() {}

    /**
     * Specifies how sub directories are traversed when
     * searching for script files.
     */
    public enum DirectoryTraversal {
        /**
         * Do not traverse sub directories.
         */
        NONE,
        /**
         * Traverse all sub directories.
         */
        RECURSIVE
    }

    /**
     * Get all non-directory files in a folder.
     *
     * @param folder     The folder to search for files in.
     * @param traversal  The directory traversal of the search.
     */
    public static List<File> getFiles(File folder, DirectoryTraversal traversal) {
        return getFiles(folder, traversal, null);
    }

    /**
     * Get all non-directory files in a folder.
     *
     * @param folder         The folder to search for files in.
     * @param traversal      The directory traversal of the search.
     * @param fileValidator  The validator used to validate files.
     */
    public static List<File> getFiles(File folder, DirectoryTraversal traversal,
                                      @Nullable IEntryValidator<File> fileValidator) {
        PreCon.notNull(folder);
        PreCon.isValid(folder.isDirectory(), "folder argument must be a folder.");
        PreCon.notNull(traversal);

        File[] files = folder.listFiles();
        // listFiles returns null on an I/O error; treat as an empty folder
        if (files == null)
            return new ArrayList<>(0);

        List<File> results = new ArrayList<File>(files.length);

        for (File file : files) {
            if (file.isDirectory() && traversal == DirectoryTraversal.RECURSIVE) {
                // descend and merge the sub folder results
                results.addAll(getFiles(file, DirectoryTraversal.RECURSIVE, fileValidator));
            }
            else if (!file.isDirectory()) {
                if (fileValidator != null && !fileValidator.isValid(file))
                    continue;

                results.add(file);
            }
        }
        return results;
    }

    /**
     * Get the extension of a file, not including a dot.
     *
     * @param file  The file to check.
     */
    public static String getFileExtension(File file) {
        PreCon.notNull(file);

        return getFileExtension(file.getName());
    }

    /**
     * Get the extension of a file name, not including a dot.
     *
     * <p>Returns an empty string if the name has no extension.</p>
     *
     * @param fileName  The file name to check.
     */
    public static String getFileExtension(String fileName) {
        PreCon.notNull(fileName);

        int i = fileName.lastIndexOf('.');
        if (i != -1) {
            return fileName.substring(i + 1);
        }
        return "";
    }

    /**
     * Get the name of a file not including the extension.
     *
     * @param file  The file to check.
     */
    public static String getNameWithoutExtension(File file) {
        PreCon.notNull(file);

        return getNameWithoutExtension(file.getName());
    }

    /**
     * Get the name of a file not including the extension.
     *
     * @param fileName  The file name to check.
     */
    public static String getNameWithoutExtension(String fileName) {
        PreCon.notNull(fileName);

        int i = fileName.lastIndexOf('.');
        if (i != -1) {
            return fileName.substring(0, i);
        }
        return fileName;
    }

    /**
     * Get the relative path of a file using a base path
     * to specify the absolute portion.
     *
     * <p>If the path is not below the base path, the absolute
     * path is returned unchanged.</p>
     *
     * @param base  The absolute portion of the path.
     * @param path  The absolute path to convert to a relative path.
     */
    public static String getRelative(File base, File path) {
        String absBase = base.getAbsolutePath();
        String absPath = path.getAbsolutePath();

        // startsWith replaces the original indexOf(..) != 0 check;
        // same result, clearer intent
        if (!absPath.startsWith(absBase))
            return absPath;

        return absPath.substring(absBase.length());
    }

    /**
     * Read the text contents of a class resource.
     *
     * @param cls           The class whose class loader supplies the resource.
     * @param resourcePath  The path of the resource.
     * @param charSet       The encoding to read with.
     *
     * @return  The text contents, or null if the resource was not found.
     */
    @Nullable
    public static String scanTextFile(Class<?> cls, String resourcePath, Charset charSet) {
        return scanTextFile(cls, resourcePath, charSet, null);
    }

    /**
     * Read the text contents of a class resource, filtering lines
     * through a validator.
     *
     * @param cls            The class whose class loader supplies the resource.
     * @param resourcePath   The path of the resource.
     * @param charSet        The encoding to read with.
     * @param lineValidator  Optional validator; invalid lines are skipped.
     *
     * @return  The text contents, or null if the resource was not found.
     */
    @Nullable
    public static String scanTextFile(Class<?> cls, String resourcePath, Charset charSet,
                                      @Nullable IEntryValidator<String> lineValidator) {
        PreCon.notNull(cls);
        PreCon.notNullOrEmpty(resourcePath);
        PreCon.notNull(charSet);

        InputStream input = cls.getResourceAsStream(resourcePath);
        if (input == null)
            return null;

        // close in finally so the stream is released even if scanning throws
        try {
            return scanTextFile(input, charSet, 50, lineValidator);
        }
        finally {
            try {
                input.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Read the text contents of a file.
     *
     * @param file     The file to read.
     * @param charSet  The encoding to read with.
     *
     * @return  The text contents, or null if the file could not be opened.
     */
    @Nullable
    public static String scanTextFile(File file, Charset charSet) {
        PreCon.notNull(file);
        PreCon.notNull(charSet);

        return scanTextFile(file, charSet, null);
    }

    /**
     * Read the text contents of a file, filtering lines through a validator.
     *
     * @param file           The file to read.
     * @param charSet        The encoding to read with.
     * @param lineValidator  Optional validator; invalid lines are skipped.
     *
     * @return  The text contents, or null if the file could not be opened.
     */
    @Nullable
    public static String scanTextFile(File file, Charset charSet,
                                      @Nullable IEntryValidator<String> lineValidator) {
        PreCon.notNull(file);
        PreCon.notNull(charSet);

        InputStream input;

        try {
            input = new FileInputStream(file);
        } catch (FileNotFoundException e) {
            // best effort: report and return null, matching the
            // established error handling style of this class
            e.printStackTrace();
            return null;
        }

        // close in finally so the stream is released even if scanning throws
        try {
            return scanTextFile(input, charSet, (int)file.length(), lineValidator);
        }
        finally {
            try {
                input.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Read the text contents of a stream.
     *
     * <p>The stream is not closed.</p>
     *
     * @param input              The stream to read.
     * @param charSet            The encoding to read with.
     * @param initialBufferSize  Initial size of the result buffer.
     */
    public static String scanTextFile(InputStream input, Charset charSet, int initialBufferSize) {
        PreCon.notNull(input);
        PreCon.notNull(charSet);

        return scanTextFile(input, charSet, initialBufferSize, null);
    }

    /**
     * Read the text contents of a stream, filtering lines through a validator.
     *
     * <p>The stream is not closed.</p>
     *
     * @param input              The stream to read.
     * @param charSet            The encoding to read with.
     * @param initialBufferSize  Initial size of the result buffer.
     * @param lineValidator      Optional validator; invalid lines are skipped.
     */
    public static String scanTextFile(InputStream input, Charset charSet, int initialBufferSize,
                                      @Nullable IEntryValidator<String> lineValidator) {
        PreCon.notNull(input);
        PreCon.notNull(charSet);

        StringBuilder result = new StringBuilder(initialBufferSize);

        // the scanner is deliberately not closed: closing it would also
        // close the caller-supplied stream, which callers close themselves
        Scanner scanner = new Scanner(input, charSet.name());

        while (scanner.hasNextLine()) {
            String line = scanner.nextLine();

            if (lineValidator != null && !lineValidator.isValid(line))
                continue;

            result.append(line);
            result.append('\n');
        }

        return result.toString();
    }

    /**
     * Writes a text file.
     *
     * <p>Intended for writing a single line at a time. The line producer
     * is used to retrieve each line and to know when to stop.</p>
     *
     * <p>The method finishes when the line producer returns null.</p>
     *
     * @param file          The file to write.
     * @param charset       The encoding to use.
     * @param lineProducer  The line producer.
     *
     * @return  The number of lines written.
     */
    public static int writeTextFile(File file, Charset charset, ITextLineProducer lineProducer) {
        PreCon.notNull(file);
        PreCon.notNull(charset);

        // -1 == no line limit
        return writeTextFile(file, charset, -1, lineProducer);
    }

    /**
     * Writes a text file.
     *
     * <p>Intended for writing a single line at a time. The line producer
     * is used to retrieve each line.</p>
     *
     * <p>The method finishes when the line producer returns null or the total
     * number of lines written matches the specified total.</p>
     *
     * @param file          The file to write.
     * @param charset       The encoding to use.
     * @param totalLines    The total number of lines to write, or -1 for no limit.
     * @param lineProducer  The line producer.
     *
     * @return  The number of lines written.
     */
    public static int writeTextFile(File file, Charset charset, int totalLines,
                                    ITextLineProducer lineProducer) {
        PreCon.notNull(file);
        PreCon.notNull(charset);
        PreCon.notNull(lineProducer);

        OutputStreamWriter writer = null;
        int written = 0;

        try {
            FileOutputStream fileStream = new FileOutputStream(file);
            writer = new OutputStreamWriter(fileStream, charset.name());

            while (written < totalLines || totalLines == -1) {

                String line = lineProducer.nextLine();
                if (line == null)
                    break;

                writer.write(line);
                writer.write('\n');
                written++;
            }
        }
        catch (IOException e) {
            e.printStackTrace();
        }
        finally {
            if (writer != null) {
                try {
                    writer.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }

        return written;
    }

    /**
     * Extract a class resource into a file.
     *
     * <p>The resource is looked up under the "/res/" prefix of the
     * class jar file. If the output file already exists it is returned
     * as-is without being overwritten.</p>
     *
     * @param cls           The class to get a resource stream from.
     * @param resourcePath  The path of the file within the class jar file.
     * @param outDir        The output directory.
     *
     * @return  The extracted file, or null if the resource was not found
     *          or the file could not be created.
     *
     * @throws java.lang.RuntimeException if the output folder cannot be created.
     */
    @Nullable
    public static File extractResource(Class<?> cls, String resourcePath, File outDir) {
        PreCon.notNull(cls);
        PreCon.notNull(resourcePath);
        PreCon.notNull(outDir);

        if (!outDir.exists() && !outDir.mkdirs())
            throw new RuntimeException("Failed to create output folder(s).");

        File outFile = new File(outDir, getFilename(resourcePath));

        // already extracted; leave the existing copy untouched
        if (outFile.exists())
            return outFile;

        InputStream input = cls.getResourceAsStream("/res/" + resourcePath);
        if (input == null)
            return null;

        FileOutputStream output = null;

        try {
            if (!outFile.createNewFile()) {
                return null;
            }

            output = new FileOutputStream(outFile);

            byte[] buffer = new byte[4096];
            int read;
            while ((read = input.read(buffer)) > 0) {
                output.write(buffer, 0, read);
            }
        }
        catch (IOException e) {
            e.printStackTrace();
        }
        finally {
            try {
                input.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
            try {
                if (output != null)
                    output.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        return outFile;
    }

    /**
     * Get the last path component of a resource path.
     */
    private static String getFilename(String resource) {
        String[] components = TextUtils.PATTERN_FILEPATH_SLASH.split(resource);
        return components[components.length - 1];
    }

    /**
     * Interface for getting the next line
     * to write to a text file.
     */
    public interface ITextLineProducer {

        /**
         * Get the next line.
         *
         * @return  Null to stop writing.
         */
        @Nullable
        String nextLine();
    }
}
package org.osgi.framework;

import java.util.NoSuchElementException;
import java.util.StringTokenizer;

/**
 * Version identifier for bundles and packages.
 *
 * <p>
 * Version identifiers have four components.
 * <ol>
 * <li>Major version. A non-negative integer.</li>
 * <li>Minor version. A non-negative integer.</li>
 * <li>Micro version. A non-negative integer.</li>
 * <li>Qualifier. A text string. See {@link #parseVersion(String) parseVersion}
 * for the format of the qualifier string.</li>
 * </ol>
 *
 * <p>
 * <code>Version</code> instances are immutable.
 *
 * @version $Revision$
 * @since 1.3
 */
public class Version implements Comparable {
    private final int major;
    private final int minor;
    private final int micro;
    private final String qualifier;

    /*
     * cached result of toString; lazily computed. The unsynchronized
     * publication is benign: the computation is idempotent, so at worst
     * two threads build the same string.
     */
    private transient String string;

    private static final String SEPARATOR = "."; //$NON-NLS-1$

    /** Characters permitted in a version qualifier. */
    private static final String QUALIFIER_CHARS =
        "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789_-"; //$NON-NLS-1$

    /**
     * The empty version "0.0.0". Equivalent to calling
     * <code>new Version(0,0,0)</code>.
     */
    public static final Version emptyVersion = new Version(0, 0, 0);

    /**
     * Creates a version identifier with an empty qualifier.
     *
     * @param major Major component. Must be non-negative.
     * @param minor Minor component. Must be non-negative.
     * @param micro Micro component. Must be non-negative.
     * @throws IllegalArgumentException If any numeric component is negative.
     */
    public Version(int major, int minor, int micro) {
        this(major, minor, micro, null);
    }

    /**
     * Creates a version identifier from its four components.
     *
     * @param major Major component. Must be non-negative.
     * @param minor Minor component. Must be non-negative.
     * @param micro Micro component. Must be non-negative.
     * @param qualifier Qualifier component. May only contain
     *        alphanumerics, '_' and '-'. <code>null</code> is treated as
     *        the empty string.
     * @throws IllegalArgumentException If a numeric component is negative
     *         or the qualifier contains an invalid character.
     */
    public Version(int major, int minor, int micro, String qualifier) {
        if (major < 0) {
            throw new IllegalArgumentException("negative major"); //$NON-NLS-1$
        }
        if (minor < 0) {
            throw new IllegalArgumentException("negative minor"); //$NON-NLS-1$
        }
        if (micro < 0) {
            throw new IllegalArgumentException("negative micro"); //$NON-NLS-1$
        }
        if (qualifier == null) {
            qualifier = ""; //$NON-NLS-1$
        }
        else {
            int length = qualifier.length();
            for (int i = 0; i < length; i++) {
                if (QUALIFIER_CHARS.indexOf(qualifier.charAt(i)) == -1) {
                    throw new IllegalArgumentException("invalid qualifier"); //$NON-NLS-1$
                }
            }
        }

        this.major = major;
        this.minor = minor;
        this.micro = micro;
        this.qualifier = qualifier;
    }

    /**
     * Parses a version identifier from the specified string. The format is
     * <code>major[.minor[.micro[.qualifier]]]</code>; missing numeric
     * components default to 0 and a missing qualifier defaults to the
     * empty string.
     *
     * @param version String representation of the version identifier.
     *        <code>null</code> or the empty string yields
     *        {@link #emptyVersion}.
     * @return A <code>Version</code> object representing the version
     *         identifier.
     * @throws IllegalArgumentException If <code>version</code> is improperly
     *         formatted. (A non-numeric numeric component raises
     *         <code>NumberFormatException</code>, which is a subclass.)
     */
    public static Version parseVersion(String version) {
        if ((version == null) || (version.length() == 0)) {
            return emptyVersion;
        }

        int major = 0;
        int minor = 0;
        int micro = 0;
        String qualifier = ""; //$NON-NLS-1$

        try {
            StringTokenizer st = new StringTokenizer(version, SEPARATOR, true);
            major = Integer.parseInt(st.nextToken());
            if (st.hasMoreTokens()) {
                st.nextToken(); // consume delimiter
                minor = Integer.parseInt(st.nextToken());
                if (st.hasMoreTokens()) {
                    st.nextToken(); // consume delimiter
                    micro = Integer.parseInt(st.nextToken());
                    if (st.hasMoreTokens()) {
                        st.nextToken(); // consume delimiter
                        qualifier = st.nextToken();
                        if (st.hasMoreTokens()) {
                            // trailing tokens after the qualifier
                            throw new IllegalArgumentException("invalid format"); //$NON-NLS-1$
                        }
                    }
                }
            }
        }
        catch (NoSuchElementException e) {
            throw new IllegalArgumentException("invalid format"); //$NON-NLS-1$
        }
        return new Version(major, minor, micro, qualifier);
    }

    /**
     * Returns the major component of this version identifier.
     *
     * @return The major component.
     */
    public int getMajor() {
        return major;
    }

    /**
     * Returns the minor component of this version identifier.
     *
     * @return The minor component.
     */
    public int getMinor() {
        return minor;
    }

    /**
     * Returns the micro component of this version identifier.
     *
     * @return The micro component.
     */
    public int getMicro() {
        return micro;
    }

    /**
     * Returns the qualifier component of this version identifier.
     *
     * @return The qualifier component.
     */
    public String getQualifier() {
        return qualifier;
    }

    /**
     * Returns the string representation of this version identifier.
     *
     * <p>
     * The format of the version string will be <code>major.minor.micro</code>
     * if qualifier is the empty string or
     * <code>major.minor.micro.qualifier</code> otherwise.
     *
     * @return The string representation of this version identifier.
     */
    public String toString() {
        if (string == null) {
            String base = major + SEPARATOR + minor + SEPARATOR + micro;
            if (qualifier.length() == 0) {
                string = base;
            }
            else {
                string = base + SEPARATOR + qualifier;
            }
        }
        return string;
    }

    /**
     * Returns a hash code value for the object.
     *
     * <p>
     * Computed directly from the immutable components (rather than by
     * building and hashing the string representation, as before) so no
     * intermediate string is allocated. Equal versions have equal
     * components and therefore still produce equal hash codes,
     * preserving consistency with {@link #equals(Object)}.
     *
     * @return An integer which is a hash code value for this object.
     */
    public int hashCode() {
        return (major << 24) + (minor << 16) + (micro << 8)
                + qualifier.hashCode();
    }

    /**
     * Compares this <code>Version</code> object to another object.
     *
     * <p>
     * A version is considered to be <b>equal to </b> another version if the
     * major, minor and micro components are equal and the qualifier component
     * is equal (using <code>String.equals</code>).
     *
     * @param object The <code>Version</code> object to be compared.
     * @return <code>true</code> if <code>object</code> is a
     *         <code>Version</code> and is equal to this object;
     *         <code>false</code> otherwise.
     */
    public boolean equals(Object object) {
        if (object == this) { // quicktest
            return true;
        }
        if (!(object instanceof Version)) {
            return false;
        }
        Version other = (Version) object;
        return (major == other.major) && (minor == other.minor)
                && (micro == other.micro) && qualifier.equals(other.qualifier);
    }

    /**
     * Compares this <code>Version</code> object to another object.
     *
     * <p>
     * A version is considered to be <b>less than </b> another version if its
     * major component is less than the other version's major component, or the
     * major components are equal and its minor component is less than the other
     * version's minor component, or the major and minor components are equal
     * and its micro component is less than the other version's micro component,
     * or the major, minor and micro components are equal and it's qualifier
     * component is less than the other version's qualifier component (using
     * <code>String.compareTo</code>).
     *
     * <p>
     * A version is considered to be <b>equal to </b> another version if the
     * major, minor and micro components are equal and the qualifier component
     * is equal (using <code>String.compareTo</code>).
     *
     * @param object The <code>Version</code> object to be compared.
     * @return A negative integer, zero, or a positive integer if this object is
     *         less than, equal to, or greater than the specified
     *         <code>Version</code> object.
     * @throws ClassCastException If the specified object is not a
     *         <code>Version</code>.
     */
    public int compareTo(Object object) {
        if (object == this) { // quicktest
            return 0;
        }
        Version other = (Version) object;

        // subtraction cannot overflow: the constructor guarantees all
        // numeric components are non-negative
        int result = major - other.major;
        if (result != 0) {
            return result;
        }
        result = minor - other.minor;
        if (result != 0) {
            return result;
        }
        result = micro - other.micro;
        if (result != 0) {
            return result;
        }
        return qualifier.compareTo(other.qualifier);
    }
}
package com.opencms.file.genericSql; import javax.servlet.http.*; import java.util.*; import java.net.*; import java.io.*; import source.org.apache.java.io.*; import source.org.apache.java.util.*; import com.opencms.core.*; import com.opencms.file.*; import com.opencms.template.*; import java.sql.SQLException; public class CmsResourceBroker implements I_CmsResourceBroker, I_CmsConstants { //create a compare class to be used in the vector. class Resource { private String path = null; public Resource(String path) { this.path = path; } public boolean equals(Object obj) { return ( (obj instanceof CmsResource) && path.equals( ((CmsResource) obj).getAbsolutePath() )); } } /** * Constant to count the file-system changes. */ protected long m_fileSystemChanges = 0; /** * Constant to count the file-system changes if Folders are involved. */ protected long m_fileSystemFolderChanges = 0; /** * Hashtable with resource-types. */ protected Hashtable m_resourceTypes = null; /** * The configuration of the property-file. */ protected Configurations m_configuration = null; /** * The access-module. */ protected CmsDbAccess m_dbAccess = null; /** * The Registry */ protected I_CmsRegistry m_registry = null; /** * Define the caches */ protected CmsCache m_userCache = null; protected CmsCache m_groupCache = null; protected CmsCache m_usergroupsCache = null; protected CmsCache m_resourceCache = null; protected CmsCache m_subresCache = null; protected CmsCache m_projectCache = null; protected CmsCache m_onlineProjectCache = null; protected CmsCache m_propertyCache = null; protected CmsCache m_propertyDefCache = null; protected CmsCache m_propertyDefVectorCache = null; protected CmsCache m_accessCache = null; protected int m_cachelimit = 0; protected String m_refresh = null; /** * Delete published project */ protected boolean m_deletePublishedProject = false; /** * Accept a task from the Cms. * * <B>Security:</B> * All users are granted. 
* * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param taskid The Id of the task to accept. * * @exception CmsException Throws CmsException if something goes wrong. */ public void acceptTask(CmsUser currentUser, CmsProject currentProject, int taskId) throws CmsException { CmsTask task = m_dbAccess.readTask(taskId); task.setPercentage(1); task = m_dbAccess.writeTask(task); m_dbAccess.writeSystemTaskLog(taskId, "Task was accepted from " + currentUser.getFirstname() + " " + currentUser.getLastname() + "."); } /** * Checks, if the user may create this resource. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param resource The resource to check. * * @return wether the user has access, or not. */ public boolean accessCreate(CmsUser currentUser, CmsProject currentProject, CmsResource resource) throws CmsException { // check, if this is the onlineproject if(onlineProject(currentUser, currentProject).equals(currentProject)){ // the online-project is not writeable! return(false); } // check the access to the project if( ! accessProject(currentUser, currentProject, currentProject.getId()) ) { // no access to the project! return(false); } // check if the resource belongs to the current project if(resource.getProjectId() != currentProject.getId()) { return false; } // check the rights and if the resource is not locked do { if( accessOther(currentUser, currentProject, resource, C_ACCESS_PUBLIC_WRITE) || accessOwner(currentUser, currentProject, resource, C_ACCESS_OWNER_WRITE) || accessGroup(currentUser, currentProject, resource, C_ACCESS_GROUP_WRITE) ) { // is the resource locked? 
if( resource.isLocked() && (resource.isLockedBy() != currentUser.getId() ) ) { // resource locked by anopther user, no creation allowed return(false); } // read next resource if(resource.getParent() != null) { resource = readFolder(currentUser,currentProject, resource.getParent()); } } else { // last check was negative return(false); } } while(resource.getParent() != null); // all checks are done positive return(true); } /** * Checks, if the group may access this resource. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param resource The resource to check. * @param flags The flags to check. * * @return wether the user has access, or not. */ protected boolean accessGroup(CmsUser currentUser, CmsProject currentProject, CmsResource resource, int flags) throws CmsException { // is the user in the group for the resource? if(userInGroup(currentUser, currentProject, currentUser.getName(), readGroup(currentUser, currentProject, resource).getName())) { if( (resource.getAccessFlags() & flags) == flags ) { return true; } } // the resource isn't accesible by the user. return false; } /** * Checks, if the user may lock this resource. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param resource The resource to check. * * @return wether the user may lock this resource, or not. */ public boolean accessLock(CmsUser currentUser, CmsProject currentProject, CmsResource resource) throws CmsException { // check, if this is the onlineproject if(onlineProject(currentUser, currentProject).equals(currentProject)){ // the online-project is not writeable! return(false); } // check the access to the project if( ! accessProject(currentUser, currentProject, currentProject.getId()) ) { // no access to the project! 
return(false); } // check if the resource belongs to the current project if(resource.getProjectId() != currentProject.getId()) { return false; } // read the parent folder if(resource.getParent() != null) { resource = readFolder(currentUser,currentProject, resource.getParent()); } else { // no parent folder! return true; } // check the rights and if the resource is not locked do { // is the resource locked? if( resource.isLocked() && (resource.isLockedBy() != currentUser.getId() ) ) { // resource locked by anopther user, no creation allowed return(false); } // read next resource if(resource.getParent() != null) { resource = readFolder(currentUser,currentProject, resource.getParent()); } } while(resource.getParent() != null); // all checks are done positive return(true); } /** * Checks, if others may access this resource. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param resource The resource to check. * @param flags The flags to check. * * @return wether the user has access, or not. */ protected boolean accessOther(CmsUser currentUser, CmsProject currentProject, CmsResource resource, int flags) throws CmsException { if ((resource.getAccessFlags() & flags) == flags) { return true; } else { return false; } } /** * Checks, if the owner may access this resource. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param resource The resource to check. * @param flags The flags to check. * * @return wether the user has access, or not. */ protected boolean accessOwner(CmsUser currentUser, CmsProject currentProject, CmsResource resource, int flags) throws CmsException { // The Admin has always access if( isAdmin(currentUser, currentProject) ) { return(true); } // is the resource owned by this user? 
if(resource.getOwnerId() == currentUser.getId()) { if( (resource.getAccessFlags() & flags) == flags ) { return true ; } } // the resource isn't accesible by the user. return false; } // Methods working with projects /** * Tests if the user can access the project. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param projectId the id of the project. * @return true, if the user has access, else returns false. * @exception CmsException Throws CmsException if something goes wrong. */ public boolean accessProject(CmsUser currentUser, CmsProject currentProject, int projectId) throws CmsException { CmsProject testProject = readProject(currentUser, currentProject, projectId); if (projectId==C_PROJECT_ONLINE_ID) { return true; } // is the project unlocked? if( testProject.getFlags() != C_PROJECT_STATE_UNLOCKED ) { return(false); } // is the current-user admin, or the owner of the project? if( (currentProject.getOwnerId() == currentUser.getId()) || isAdmin(currentUser, currentProject) ) { return(true); } // get all groups of the user Vector groups = getGroupsOfUser(currentUser, currentProject, currentUser.getName()); // test, if the user is in the same groups like the project. for(int i = 0; i < groups.size(); i++) { int groupId = ((CmsGroup) groups.elementAt(i)).getId(); if( ( groupId == testProject.getGroupId() ) || ( groupId == testProject.getManagerGroupId() ) ) { return( true ); } } return( false ); } /** * Checks, if the user may read this resource. * NOTE: If the ressource is in the project you never have to fallback. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param resource The resource to check. * * @return weather the user has access, or not. 
*/ public boolean accessRead(CmsUser currentUser, CmsProject currentProject, CmsResource resource) throws CmsException { Boolean access=(Boolean)m_accessCache.get(currentUser.getId()+":"+currentProject.getId()+":"+resource.getName()); if (access != null) { return access.booleanValue(); } else { if ((resource == null) || !accessProject(currentUser, currentProject, resource.getProjectId()) || (!accessOther(currentUser, currentProject, resource, C_ACCESS_PUBLIC_READ) && !accessOwner(currentUser, currentProject, resource, C_ACCESS_OWNER_READ) && !accessGroup(currentUser, currentProject, resource, C_ACCESS_GROUP_READ))) { m_accessCache.put(currentUser.getId()+":"+currentProject.getId()+":"+resource.getName(),new Boolean(false)); return false; } // check the rights for all CmsResource res = resource; // save the original resource name to be used if an error occurs. while (res.getParent() != null) { //res = m_dbAccess.readFolder(res.getProjectId(), res.getParent()); res = readFolder(currentUser, currentProject, res.getParent()); if (res == null) { A_OpenCms.log(A_OpenCms.C_OPENCMS_DEBUG, "Resource has no parent: " + resource.getAbsolutePath()); throw new CmsException(this.getClass().getName() + ".accessRead(): Cannot find \'" + resource.getName(), CmsException.C_NOT_FOUND); } if (!accessOther(currentUser, currentProject, res, C_ACCESS_PUBLIC_READ) && !accessOwner(currentUser, currentProject, res, C_ACCESS_OWNER_READ) && !accessGroup(currentUser, currentProject, res, C_ACCESS_GROUP_READ)) { m_accessCache.put(currentUser.getId()+":"+currentProject.getId()+":"+resource.getName(),new Boolean(false)); return false; } } m_accessCache.put(currentUser.getId()+":"+currentProject.getId()+":"+resource.getName(),new Boolean(true)); return true; } } /** * Checks, if the user may unlock this resource. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param resource The resource to check. 
*
     * @return whether the user may unlock this resource, or not.
     */
    public boolean accessUnlock(CmsUser currentUser, CmsProject currentProject,
                                CmsResource resource)
        throws CmsException {
        // check, if this is the onlineproject
        if(onlineProject(currentUser, currentProject).equals(currentProject)){
            // the online-project is not writeable!
            return(false);
        }

        // check the access to the project
        if( ! accessProject(currentUser, currentProject, currentProject.getId()) ) {
            // no access to the project!
            return(false);
        }

        // check if the resource belongs to the current project
        if(resource.getProjectId() != currentProject.getId()) {
            return false;
        }

        // read the parent folder
        if(resource.getParent() != null) {
            resource = readFolder(currentUser,currentProject, resource.getParent());
        } else {
            // no parent folder! (resource is the root) - unlocking is allowed
            return true;
        }

        // walk up the folder chain: unlocking is denied if ANY ancestor is locked
        do {
            // is the resource locked?
            if( resource.isLocked() ) {
                // resource locked by another user, no creation allowed
                return(false);
            }
            // read next resource (the parent folder)
            if(resource.getParent() != null) {
                resource = readFolder(currentUser,currentProject, resource.getParent());
            }
        } while(resource.getParent() != null);

        // all checks are done positive
        return(true);
    }

    /**
     * Checks, if the user may write this resource.
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param resource The resource to check.
     *
     * @return whether the user has access, or not.
     */
    public boolean accessWrite(CmsUser currentUser, CmsProject currentProject,
                               CmsResource resource)
        throws CmsException {
        // check, if this is the onlineproject
        if(onlineProject(currentUser, currentProject).equals(currentProject)){
            // the online-project is not writeable!
            return(false);
        }

        // check the access to the project
        if( ! accessProject(currentUser, currentProject, currentProject.getId()) ) {
            // no access to the project!
            return(false);
        }

        // check if the resource belongs to the current project
        if(resource.getProjectId() != currentProject.getId()) {
            return false;
        }

        // check, if the resource is locked by the current user
        if(resource.isLockedBy() != currentUser.getId()) {
            // resource is not locked by the current user, no writing allowed
            return(false);
        }

        // check the rights for the current resource
        if( ! ( accessOther(currentUser, currentProject, resource, C_ACCESS_PUBLIC_WRITE)
                || accessOwner(currentUser, currentProject, resource, C_ACCESS_OWNER_WRITE)
                || accessGroup(currentUser, currentProject, resource, C_ACCESS_GROUP_WRITE) ) ) {
            // no write access to this resource!
            return false;
        }

        // read the parent folder
        if(resource.getParent() != null) {
            resource = readFolder(currentUser,currentProject, resource.getParent());
        } else {
            // no parent folder! (root) - nothing more to check
            return true;
        }

        // walk up the folder chain: every ancestor must grant WRITE and must not
        // be locked by a different user
        do {
            if( accessOther(currentUser, currentProject, resource, C_ACCESS_PUBLIC_WRITE)
                || accessOwner(currentUser, currentProject, resource, C_ACCESS_OWNER_WRITE)
                || accessGroup(currentUser, currentProject, resource, C_ACCESS_GROUP_WRITE) ) {

                // is the resource locked?
                if( resource.isLocked() && (resource.isLockedBy() != currentUser.getId() ) ) {
                    // resource locked by another user, no creation allowed
                    return(false);
                }

                // read next resource (the parent folder)
                if(resource.getParent() != null) {
                    resource = readFolder(currentUser,currentProject, resource.getParent());
                }
            } else {
                // last check was negative
                return(false);
            }
        } while(resource.getParent() != null);

        // all checks are done positive
        return(true);
    }

    /**
     * adds a file extension to the list of known file extensions
     *
     * <B>Security:</B>
     * Users, which are in the group "administrators" are granted.<BR/>
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
* @param extension a file extension like 'html' * @param resTypeName name of the resource type associated to the extension */ public void addFileExtension(CmsUser currentUser, CmsProject currentProject, String extension, String resTypeName) throws CmsException { if (extension != null && resTypeName != null) { if (isAdmin(currentUser, currentProject)) { Hashtable suffixes=(Hashtable) m_dbAccess.readSystemProperty(C_SYSTEMPROPERTY_EXTENSIONS); if (suffixes == null) { suffixes = new Hashtable(); suffixes.put(extension, resTypeName); m_dbAccess.addSystemProperty(C_SYSTEMPROPERTY_EXTENSIONS, suffixes); } else { suffixes.put(extension, resTypeName); m_dbAccess.writeSystemProperty(C_SYSTEMPROPERTY_EXTENSIONS, suffixes); } } else { throw new CmsException("[" + this.getClass().getName() + "] " + extension, CmsException.C_NO_ACCESS); } } } /** * Add a new group to the Cms.<BR/> * * Only the admin can do this.<P/> * * <B>Security:</B> * Only users, which are in the group "administrators" are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param name The name of the new group. * @param description The description for the new group. * @int flags The flags for the new group. * @param name The name of the parent group (or null). * * @return Group * * @exception CmsException Throws CmsException if operation was not succesfull. 
*/
    public CmsGroup addGroup(CmsUser currentUser, CmsProject currentProject,
                             String name, String description, int flags,
                             String parent)
        throws CmsException {
        // Check the security
        if( isAdmin(currentUser, currentProject) ) {
            // no space before or after the name
            name = name.trim();
            validName(name, false);
            // check the length of the groupname (must be at least 2 characters)
            if(name.length() > 1) {
                return( m_dbAccess.createGroup(name, description, flags, parent) );
            } else {
                throw new CmsException("[" + this.getClass().getName() + "] " + name,
                    CmsException.C_BAD_NAME);
            }
        } else {
            throw new CmsException("[" + this.getClass().getName() + "] " + name,
                CmsException.C_NO_ACCESS);
        }
    }

    /**
     * Adds a CmsResourceTypes.
     *
     * <B>Security:</B>
     * Users, which are in the group "administrators" are granted.<BR/>
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param resourceType the name of the resource to get.
     * @param launcherType the launcherType-id
     * @param launcherClass the name of the launcher-class normaly ""
     *
     * Returns a CmsResourceTypes.
     *
     * @exception CmsException Throws CmsException if operation was not succesful.
     */
    public CmsResourceType addResourceType(CmsUser currentUser,
                                           CmsProject currentProject,
                                           String resourceType, int launcherType,
                                           String launcherClass)
        throws CmsException {
        if( isAdmin(currentUser, currentProject) ) {

            // read the resourceTypes from the propertys
            m_resourceTypes = (Hashtable) m_dbAccess.readSystemProperty(C_SYSTEMPROPERTY_RESOURCE_TYPE);

            synchronized(m_resourceTypes) {

                // get the last index and increment it.
                Integer lastIndex = new Integer(((Integer)
                    m_resourceTypes.get(C_TYPE_LAST_INDEX)).intValue() + 1);

                // write the last index back
                m_resourceTypes.put(C_TYPE_LAST_INDEX, lastIndex);

                // add the new resource-type
                m_resourceTypes.put(resourceType,
                    new CmsResourceType(lastIndex.intValue(), launcherType,
                                        resourceType, launcherClass));

                // store the resource types in the properties
                m_dbAccess.writeSystemProperty(C_SYSTEMPROPERTY_RESOURCE_TYPE, m_resourceTypes);
            }

            // the cached resource types aren't valid any more.
            // (forces a re-read on the next getResourceType call)
            m_resourceTypes = null;
            // return the new resource-type
            return(getResourceType(currentUser, currentProject, resourceType));
        } else {
            throw new CmsException("[" + this.getClass().getName() + "] " + resourceType,
                CmsException.C_NO_ACCESS);
        }
    }

    /**
     * Adds a user to the Cms.
     *
     * Only a adminstrator can add users to the cms.<P/>
     *
     * <B>Security:</B>
     * Only users, which are in the group "administrators" are granted.
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param name The new name for the user.
     * @param password The new password for the user.
     * @param group The default groupname for the user.
     * @param description The description for the user.
     * @param additionalInfos A Hashtable with additional infos for the user. These
     * Infos may be stored into the Usertables (depending on the implementation).
     * @param flags The flags for a user (e.g. C_FLAG_ENABLED)
     *
     * @return user The added user will be returned.
     *
     * @exception CmsException Throws CmsException if operation was not succesfull.
*/
    public CmsUser addUser(CmsUser currentUser, CmsProject currentProject,
                           String name, String password, String group,
                           String description, Hashtable additionalInfos, int flags)
        throws CmsException {
        // Check the security
        if( isAdmin(currentUser, currentProject) ) {
            // no space before or after the name
            name = name.trim();
            // check the username
            validName(name, false);
            // check the password minimumsize
            // NOTE(review): an empty name also ends up in the else branch and is
            // reported as C_SHORT_PASSWORD, which is slightly misleading.
            if( (name.length() > 0) && (password.length() >= C_PASSWORD_MINIMUMSIZE) ) {
                CmsGroup defaultGroup = readGroup(currentUser, currentProject, group);
                CmsUser newUser = m_dbAccess.addUser(name, password, description,
                    " ", " ", " ", 0, 0, C_FLAG_ENABLED, additionalInfos,
                    defaultGroup, " ", " ", C_USER_TYPE_SYSTEMUSER);
                // make the user a member of its default group
                addUserToGroup(currentUser, currentProject, newUser.getName(),defaultGroup.getName());
                return newUser;
            } else {
                throw new CmsException("[" + this.getClass().getName() + "] " + name,
                    CmsException.C_SHORT_PASSWORD);
            }
        } else {
            throw new CmsException("[" + this.getClass().getName() + "] " + name,
                CmsException.C_NO_ACCESS);
        }
    }

    /**
     * Adds a user to a group.<BR/>
     *
     * Only the admin can do this.<P/>
     *
     * <B>Security:</B>
     * Only users, which are in the group "administrators" are granted.
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param username The name of the user that is to be added to the group.
     * @param groupname The name of the group.
     * @exception CmsException Throws CmsException if operation was not succesfull.
*/
    public void addUserToGroup(CmsUser currentUser, CmsProject currentProject,
                               String username, String groupname)
        throws CmsException {
        // test if this user is already in the group
        // NOTE(review): this membership test runs BEFORE the admin check, so even
        // non-admin callers learn whether the user is in the group - confirm this
        // information leak is acceptable.
        if (userInGroup(currentUser,currentProject,username,groupname)) {
            // user already there, throw exception
            throw new CmsException("[" + this.getClass().getName() + "] add "
                + username+ " to " +groupname, CmsException.C_USER_EXISTS);
        }
        // Check the security
        if( isAdmin(currentUser, currentProject) ) {
            CmsUser user;
            CmsGroup group;
            user=readUser(currentUser,currentProject,username);
            //check if the user exists
            if (user != null) {
                group=readGroup(currentUser,currentProject,groupname);
                //check if group exists
                if (group != null){
                    //add this user to the group
                    m_dbAccess.addUserToGroup(user.getId(),group.getId());
                    // update the cache (memberships changed)
                    m_usergroupsCache.clear();
                } else {
                    throw new CmsException("["+this.getClass().getName()+"]"+groupname,
                        CmsException.C_NO_GROUP);
                }
            } else {
                throw new CmsException("["+this.getClass().getName()+"]"+username,
                    CmsException.C_NO_USER);
            }
        } else {
            throw new CmsException("[" + this.getClass().getName() + "] " + username,
                CmsException.C_NO_ACCESS);
        }
    }

    /**
     * Adds a web user to the Cms. <br>
     *
     * A web user has no access to the workplace but is able to access personalized
     * functions controlled by the OpenCms.
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param name The new name for the user.
     * @param password The new password for the user.
     * @param group The default groupname for the user.
     * @param description The description for the user.
     * @param additionalInfos A Hashtable with additional infos for the user. These
     * Infos may be stored into the Usertables (depending on the implementation).
     * @param flags The flags for a user (e.g. C_FLAG_ENABLED)
     *
     * @return user The added user will be returned.
     *
     * @exception CmsException Throws CmsException if operation was not succesfull.
*/
    public CmsUser addWebUser(CmsUser currentUser, CmsProject currentProject,
                              String name, String password, String group,
                              String description, Hashtable additionalInfos,
                              int flags)
        throws CmsException {
        // NOTE(review): unlike addUser, this method performs NO admin check -
        // presumably intentional for self-registration; confirm.
        // no space before or after the name
        name = name.trim();
        // check the username
        validName(name, false);
        // check the password minimumsize
        if( (name.length() > 0) && (password.length() >= C_PASSWORD_MINIMUMSIZE) ) {
            CmsGroup defaultGroup = readGroup(currentUser, currentProject, group);
            CmsUser newUser = m_dbAccess.addUser(name, password, description,
                " ", " ", " ", 0, 0, C_FLAG_ENABLED, additionalInfos,
                defaultGroup, " ", " ", C_USER_TYPE_WEBUSER);
            CmsUser user;
            CmsGroup usergroup;

            // re-read the just-created user to verify the insert succeeded
            user=m_dbAccess.readUser(newUser.getName(),C_USER_TYPE_WEBUSER);

            //check if the user exists
            if (user != null) {
                usergroup=readGroup(currentUser,currentProject,group);
                //check if group exists
                if (usergroup != null){
                    //add this user to the group
                    m_dbAccess.addUserToGroup(user.getId(),usergroup.getId());
                    // update the cache (memberships changed)
                    m_usergroupsCache.clear();
                } else {
                    throw new CmsException("["+this.getClass().getName()+"]"+group,
                        CmsException.C_NO_GROUP);
                }
            } else {
                throw new CmsException("["+this.getClass().getName()+"]"+name,
                    CmsException.C_NO_USER);
            }

            return newUser;
        } else {
            throw new CmsException("[" + this.getClass().getName() + "] " + name,
                CmsException.C_SHORT_PASSWORD);
        }
    }

    /**
     * Returns the anonymous user object.<P/>
     *
     * <B>Security:</B>
     * All users are granted.
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @return the anonymous user object.
     * @exception CmsException Throws CmsException if operation was not succesful
     */
    public CmsUser anonymousUser(CmsUser currentUser, CmsProject currentProject)
        throws CmsException {
        // the guest user is the anonymous user
        return readUser(currentUser, currentProject, C_USER_GUEST);
    }

    /**
     * Checks, if all mandatory metainfos for the resource type are set as key in the
     * metainfo-hashtable.
It throws a exception, if a mandatory metainfo is missing. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param resourceType The type of the rersource to check the metainfos for. * @param propertyinfos The propertyinfos to check. * * @exception CmsException Throws CmsException if operation was not succesful. */ protected void checkMandatoryProperties(CmsUser currentUser, CmsProject currentProject, String resourceType, Hashtable propertyinfos) throws CmsException { // read the mandatory metadefs Vector metadefs = readAllPropertydefinitions(currentUser, currentProject, resourceType, C_PROPERTYDEF_TYPE_MANDATORY); // check, if the mandatory metainfo is given for(int i = 0; i < metadefs.size(); i++) { if( propertyinfos.containsKey(metadefs.elementAt(i) ) ) { // mandatory metainfo is missing - throw exception throw new CmsException("[" + this.getClass().getName() + "] " + (String)metadefs.elementAt(i), CmsException.C_MANDATORY_PROPERTY); } } } /** * Changes the group for this resource<br> * * Only the group of a resource in an offline project can be changed. The state * of the resource is set to CHANGED (1). * If the content of this resource is not exisiting in the offline project already, * it is read from the online project and written into the offline project. * The user may change this, if he is admin of the resource. <br> * * <B>Security:</B> * Access is granted, if: * <ul> * <li>the user has access to the project</li> * <li>the user is owner of the resource or is admin</li> * <li>the resource is locked by the callingUser</li> * </ul> * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param filename The complete path to the resource. * @param newGroup The name of the new group for this resource. * * @exception CmsException Throws CmsException if operation was not succesful. 
*/
    public void chgrp(CmsUser currentUser, CmsProject currentProject,
                      String filename, String newGroup)
        throws CmsException {
        CmsResource resource=null;
        // read the resource to check the access
        // (a trailing "/" marks a folder, anything else is a file)
        if (filename.endsWith("/")) {
            resource = readFolder(currentUser,currentProject,filename);
        } else {
            resource = (CmsFile)readFileHeader(currentUser,currentProject,filename);
        }

        // has the user write-access? and is he owner or admin?
        if( accessWrite(currentUser, currentProject, resource) &&
            ( (resource.getOwnerId() == currentUser.getId()) ||
              isAdmin(currentUser, currentProject))) {
            CmsGroup group = readGroup(currentUser, currentProject, newGroup);
            resource.setGroupId(group.getId());
            // write-access was granted - write the file.
            if (filename.endsWith("/")) {
                // mark the resource CHANGED unless it is already in a non-UNCHANGED state
                if (resource.getState()==C_STATE_UNCHANGED) {
                    resource.setState(C_STATE_CHANGED);
                }
                m_dbAccess.writeFolder(currentProject,(CmsFolder)resource,true);
                // update the cache
                m_resourceCache.put(C_FOLDER+currentProject.getId()+filename,(CmsFolder)resource);
            } else {
                m_dbAccess.writeFileHeader(currentProject,(CmsFile)resource,true);
                if (resource.getState()==C_STATE_UNCHANGED) {
                    resource.setState(C_STATE_CHANGED);
                }
                // update the cache
                m_resourceCache.put(C_FILE+currentProject.getId()+filename,resource);
            }
            m_subresCache.clear();

            // inform about the file-system-change
            fileSystemChanged(false);
        } else {
            throw new CmsException("[" + this.getClass().getName() + "] " + filename,
                CmsException.C_NO_ACCESS);
        }
    }

    /**
     * Changes the flags for this resource.<br>
     *
     * Only the flags of a resource in an offline project can be changed. The state
     * of the resource is set to CHANGED (1).
     * If the content of this resource is not exisiting in the offline project already,
     * it is read from the online project and written into the offline project.
     * The user may change the flags, if he is admin of the resource <br>.
*
     * <B>Security:</B>
     * Access is granted, if:
     * <ul>
     * <li>the user has access to the project</li>
     * <li>the user can write the resource</li>
     * <li>the resource is locked by the callingUser</li>
     * </ul>
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param filename The complete path to the resource.
     * @param flags The new accessflags for the resource.
     *
     * @exception CmsException Throws CmsException if operation was not succesful.
     */
    public void chmod(CmsUser currentUser, CmsProject currentProject,
                      String filename, int flags)
        throws CmsException {
        CmsResource resource=null;
        // read the resource to check the access
        // (a trailing "/" marks a folder, anything else is a file)
        if (filename.endsWith("/")) {
            resource = readFolder(currentUser,currentProject,filename);
        } else {
            resource = (CmsFile)readFileHeader(currentUser,currentProject,filename);
        }

        // has the user write-access? alternatively, the lock holder may chmod
        // his own resources (or any resource if he is admin)
        if( accessWrite(currentUser, currentProject, resource)||
            ((resource.isLockedBy() == currentUser.getId()) &&
             (resource.getOwnerId() == currentUser.getId()||isAdmin(currentUser, currentProject))) ) {

            // write-access was granted - write the file.

            //set the flags
            resource.setAccessFlags(flags);
            //update file
            if (filename.endsWith("/")) {
                // mark the resource CHANGED unless it is already in a non-UNCHANGED state
                if (resource.getState()==C_STATE_UNCHANGED) {
                    resource.setState(C_STATE_CHANGED);
                }
                m_dbAccess.writeFolder(currentProject,(CmsFolder)resource,true);
                // update the cache
                m_resourceCache.put(C_FOLDER+currentProject.getId()+filename,(CmsFolder)resource);
            } else {
                m_dbAccess.writeFileHeader(currentProject,(CmsFile)resource,true);
                if (resource.getState()==C_STATE_UNCHANGED) {
                    resource.setState(C_STATE_CHANGED);
                }
                // update the cache
                m_resourceCache.put(C_FILE+currentProject.getId()+filename,resource);
            }
            m_subresCache.clear();
            // access decisions may change with the new flags - drop the whole cache
            m_accessCache.clear();
            // inform about the file-system-change
            fileSystemChanged(false);
        } else {
            throw new CmsException("[" + this.getClass().getName() + "] " + filename,
                CmsException.C_NO_ACCESS);
        }
    }

    /**
     * Changes the owner for this resource.<br>
     *
     * Only the owner of a resource in an offline project can be changed. The state
     * of the resource is set to CHANGED (1).
     * If the content of this resource is not exisiting in the offline project already,
     * it is read from the online project and written into the offline project.
     * The user may change this, if he is admin of the resource. <br>
     *
     * <B>Security:</B>
     * Access is granted, if:
     * <ul>
     * <li>the user has access to the project</li>
     * <li>the user is owner of the resource or the user is admin</li>
     * <li>the resource is locked by the callingUser</li>
     * </ul>
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param filename The complete path to the resource.
     * @param newOwner The name of the new owner for this resource.
     *
     * @exception CmsException Throws CmsException if operation was not succesful.
*/
    public void chown(CmsUser currentUser, CmsProject currentProject,
                      String filename, String newOwner)
        throws CmsException {
        CmsResource resource = null;
        // read the resource to check the access
        // (a trailing "/" marks a folder, anything else is a file)
        if (filename.endsWith("/")) {
            resource = readFolder(currentUser, currentProject, filename);
        } else {
            resource = (CmsFile) readFileHeader(currentUser, currentProject, filename);
        }

        // is he owner or admin? and holds he the lock on the resource?
        // NOTE(review): unlike chgrp/chmod this does NOT call accessWrite - confirm
        // this weaker check is intended.
        if (((resource.getOwnerId() == currentUser.getId())
                || isAdmin(currentUser, currentProject))
            && (resource.isLockedBy() == currentUser.getId())) {
            CmsUser owner = readUser(currentUser, currentProject, newOwner);
            resource.setUserId(owner.getId());
            // write-access was granted - write the file.
            if (filename.endsWith("/")) {
                // mark the resource CHANGED unless it is already in a non-UNCHANGED state
                if (resource.getState() == C_STATE_UNCHANGED) {
                    resource.setState(C_STATE_CHANGED);
                }
                m_dbAccess.writeFolder(currentProject, (CmsFolder) resource, true);
                // update the cache
                m_resourceCache.put(C_FOLDER + currentProject.getId() + filename, (CmsFolder) resource);
            } else {
                m_dbAccess.writeFileHeader(currentProject, (CmsFile) resource, true);
                if (resource.getState() == C_STATE_UNCHANGED) {
                    resource.setState(C_STATE_CHANGED);
                }
                // update the cache
                m_resourceCache.put(C_FILE + currentProject.getId() + filename, resource);
            }
            m_subresCache.clear();
            // ownership affects access decisions - drop the whole cache
            m_accessCache.clear();
            // inform about the file-system-change
            fileSystemChanged(false);
        } else {
            throw new CmsException("[" + this.getClass().getName() + "] " + filename,
                CmsException.C_NO_ACCESS);
        }
    }

    /**
     * Changes the state for this resource<BR/>
     *
     * The user may change this, if he is admin of the resource.
     *
     * <B>Security:</B>
     * Access is granted, if:
     * <ul>
     * <li>the user has access to the project</li>
     * <li>the user is owner of the resource or is admin</li>
     * <li>the resource is locked by the callingUser</li>
     * </ul>
     *
     * @param filename The complete path to the resource.
     * @param state The new state of this resource.
*
     * @exception CmsException will be thrown, if the user has not the rights
     * for this resource.
     */
    public void chstate(CmsUser currentUser, CmsProject currentProject,
                        String filename, int state)
        throws CmsException {
        boolean isFolder=false;
        CmsResource resource=null;
        // read the resource to check the access
        // (a trailing "/" marks a folder, anything else is a file)
        if (filename.endsWith("/")) {
            isFolder=true;
            resource = readFolder(currentUser,currentProject,filename);
        } else {
            resource = (CmsFile)readFileHeader(currentUser,currentProject,filename);
        }

        // has the user write-access?
        if( accessWrite(currentUser, currentProject, resource)) {

            resource.setState(state);
            // write-access was granted - write the file.
            // (third argument false: do NOT touch the state again on write)
            if (filename.endsWith("/")) {
                m_dbAccess.writeFolder(currentProject,(CmsFolder)resource,false);
                // update the cache
                m_resourceCache.put(C_FOLDER+currentProject.getId()+filename,(CmsFolder)resource);
            } else {
                m_dbAccess.writeFileHeader(currentProject,(CmsFile)resource,false);
                // update the cache
                m_resourceCache.put(C_FILE+currentProject.getId()+filename,resource);
            }
            m_subresCache.clear();
            // inform about the file-system-change
            fileSystemChanged(isFolder);
        } else {
            throw new CmsException("[" + this.getClass().getName() + "] " + filename,
                CmsException.C_NO_ACCESS);
        }
    }

    /**
     * Changes the resourcetype for this resource<br>
     *
     * Only the resourcetype of a resource in an offline project can be changed. The state
     * of the resource is set to CHANGED (1).
     * If the content of this resource is not exisiting in the offline project already,
     * it is read from the online project and written into the offline project.
     * The user may change this, if he is admin of the resource. <br>
     *
     * <B>Security:</B>
     * Access is granted, if:
     * <ul>
     * <li>the user has access to the project</li>
     * <li>the user is owner of the resource or is admin</li>
     * <li>the resource is locked by the callingUser</li>
     * </ul>
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
* @param filename The complete path to the resource.
     * @param newType The name of the new resourcetype for this resource.
     *
     * @exception CmsException Throws CmsException if operation was not succesful.
     */
    public void chtype(CmsUser currentUser, CmsProject currentProject,
                       String filename, String newType)
        throws CmsException {

        // resolve the type name first so an unknown type fails before any access check
        CmsResourceType type = getResourceType(currentUser, currentProject, newType);

        // read the resource to check the access
        CmsResource resource = readFileHeader(currentUser,currentProject, filename);

        // has the user write-access? and is he owner or admin?
        if( accessWrite(currentUser, currentProject, resource) &&
            ( (resource.getOwnerId() == currentUser.getId()) ||
              isAdmin(currentUser, currentProject))) {

            // write-access was granted - write the file.
            resource.setType(type.getResourceType());
            resource.setLauncherType(type.getLauncherType());
            m_dbAccess.writeFileHeader(currentProject, (CmsFile)resource,true);
            // mark the resource CHANGED unless it is already in a non-UNCHANGED state
            if (resource.getState()==C_STATE_UNCHANGED) {
                resource.setState(C_STATE_CHANGED);
            }
            // update the cache
            m_resourceCache.put(C_FILE+currentProject.getId()+filename,resource);
            m_subresCache.clear();
            // inform about the file-system-change
            fileSystemChanged(false);
        } else {
            throw new CmsException("[" + this.getClass().getName() + "] " + filename,
                CmsException.C_NO_ACCESS);
        }
    }

    /**
     * Clears all internal DB-Caches.
     */
    public void clearcache() {
        m_userCache.clear();
        m_groupCache.clear();
        m_usergroupsCache.clear();
        m_projectCache.clear();
        m_resourceCache.clear();
        m_subresCache.clear();
        m_propertyCache.clear();
        m_propertyDefCache.clear();
        m_propertyDefVectorCache.clear();
        m_onlineProjectCache.clear();
        m_accessCache.clear();
        CmsTemplateClassManager.clearCache();
    }

    /**
     * Copies a file in the Cms.
<br>
     *
     * <B>Security:</B>
     * Access is granted, if:
     * <ul>
     * <li>the user has access to the project</li>
     * <li>the user can read the sourceresource</li>
     * <li>the user can create the destinationresource</li>
     * <li>the destinationresource dosn't exists</li>
     * </ul>
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param source The complete path of the sourcefile.
     * @param destination The complete path to the destination.
     *
     * @exception CmsException Throws CmsException if operation was not succesful.
     */
    public void copyFile(CmsUser currentUser, CmsProject currentProject,
                         String source, String destination)
        throws CmsException {

        // the name of the new file.
        String filename;
        // the name of the folder.
        String foldername;

        // checks, if the destination is valid, if not it throws an exception
        // ('/' is mapped to a legal char so only the name parts are validated)
        validFilename(destination.replace('/', 'a'));

        // read the source-file, to check readaccess
        CmsResource file = readFileHeader(currentUser, currentProject, source);

        // split the destination into file and foldername
        if (destination.endsWith("/")) {
            filename = file.getName();
            foldername = destination;
        }else{
            foldername = destination.substring(0, destination.lastIndexOf("/")+1);
            filename = destination.substring(destination.lastIndexOf("/")+1,
                                             destination.length());
        }

        CmsFolder cmsFolder = readFolder(currentUser,currentProject, foldername);
        if( accessCreate(currentUser, currentProject, (CmsResource)cmsFolder) ) {

            // write-access was granted - copy the file and the metainfos
            m_dbAccess.copyFile(currentProject, onlineProject(currentUser, currentProject),
                currentUser.getId(),source,cmsFolder.getResourceId(),
                foldername + filename);

            // copy the metainfos (lock first so the copy is writable)
            lockResource(currentUser, currentProject, destination, true);
            writeProperties(currentUser,currentProject, destination,
                readAllProperties(currentUser,currentProject,file.getAbsolutePath()));
            m_accessCache.clear();
            // inform about the file-system-change
            fileSystemChanged(file.isFolder());
        } else {
            throw new CmsException("[" + this.getClass().getName() + "] " + destination,
                CmsException.C_NO_ACCESS);
        }
    }

    /**
     * Copies a folder in the Cms. <br>
     *
     * <B>Security:</B>
     * Access is granted, if:
     * <ul>
     * <li>the user has access to the project</li>
     * <li>the user can read the sourceresource</li>
     * <li>the user can create the destinationresource</li>
     * <li>the destinationresource dosn't exists</li>
     * </ul>
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param source The complete path of the sourcefolder.
     * @param destination The complete path to the destination.
     *
     * @exception CmsException Throws CmsException if operation was not succesful.
     */
    public void copyFolder(CmsUser currentUser, CmsProject currentProject,
                           String source, String destination)
        throws CmsException {

        // the name of the new file.
        String filename;
        // the name of the folder.
        String foldername;

        // read the sourcefolder to check readaccess
        //CmsFolder folder=(CmsFolder)readFolder(currentUser, currentProject, source);

        // checks, if the destination is valid, if not it throws an exception
        validFilename(destination.replace('/', 'a'));
        // strip the trailing "/" before looking for the parent folder's path
        foldername = destination.substring(0,
            destination.substring(0,destination.length()-1).lastIndexOf("/")+1);

        CmsFolder cmsFolder = readFolder(currentUser,currentProject, foldername);
        if( accessCreate(currentUser, currentProject, (CmsResource)cmsFolder) ) {

            // write-access was granted - copy the folder and the properties
            CmsFolder folder=readFolder(currentUser,currentProject,source);
            m_dbAccess.createFolder(currentUser,currentProject,
                onlineProject(currentUser, currentProject),folder,
                cmsFolder.getResourceId(),destination);
            // copy the properties (lock first so the copy is writable)
            lockResource(currentUser, currentProject, destination, true);
            writeProperties(currentUser,currentProject, destination,
                readAllProperties(currentUser,currentProject,folder.getAbsolutePath()));
            m_accessCache.clear();
            // inform about the file-system-change
            fileSystemChanged(true);
        } else {
            throw new CmsException("[" + this.getClass().getName() + "] " + destination,
                CmsException.C_ACCESS_DENIED);
        }
    }

    // plain pass-through to the DB layer; no security checks at this level
    public void copyResourceToProject(CmsProject currentProject,
                                      CmsProject fromProject,
                                      CmsResource resource)
        throws com.opencms.core.CmsException {
        m_dbAccess.copyResourceToProject(currentProject, fromProject, resource);
    }

    /**
     * Copies a resource from the online project to a new, specified project.<br>
     * Copying a resource will copy the file header or folder into the specified
     * offline project and set its state to UNCHANGED.
     *
     * <B>Security:</B>
     * Access is granted, if:
     * <ul>
     * <li>the user is the owner of the project</li>
     * </ul>
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param resource The name of the resource.
     * @exception CmsException Throws CmsException if operation was not succesful.
     */
    public void copyResourceToProject(CmsUser currentUser,
                                      CmsProject currentProject,
                                      String resource)
        throws CmsException {

        // read the onlineproject
        CmsProject online = onlineProject(currentUser, currentProject);

        // is the current project the onlineproject?
        // and is the current user the owner of the project?
        // and is the current project state UNLOCKED?
        if( (!currentProject.equals( online ) ) &&
            (currentProject.getOwnerId() == currentUser.getId()) &&
            (currentProject.getFlags() == C_PROJECT_STATE_UNLOCKED)) {
            // is offlineproject and is owner

            CmsResource onlineRes= readFileHeader(currentUser,online, resource);
            CmsResource offlineRes=null;

            // walk recursively through all parents and copy them, too
            String parent = onlineRes.getParent();
            Stack resources=new Stack();

            // go through all parents and store them on a stack
            // (so they come off the stack root-first)
            while(parent != null) {
                // read the online-resource
                onlineRes = readFileHeader(currentUser,online, parent);
                resources.push(onlineRes);
                // get the parent
                parent = onlineRes.getParent();
            }

            // now create all parent folders, starting at the root folder
            while (resources.size()>0){
                onlineRes=(CmsResource)resources.pop();
                parent=onlineRes.getAbsolutePath();
                // copy it to the offlineproject
                try {
                    m_dbAccess.copyResourceToProject(currentProject, online, onlineRes);

                    // read the offline-resource
                    offlineRes = readFileHeader(currentUser,currentProject, parent);

                    // copy the metainfos
                    writeProperties(currentUser,currentProject,offlineRes.getAbsolutePath(),
                        readAllProperties(currentUser,currentProject,onlineRes.getAbsolutePath()));
                    chstate(currentUser,currentProject,offlineRes.getAbsolutePath(),C_STATE_UNCHANGED);
                } catch (CmsException exc) {
                    // if the subfolder exists already - all is ok
                    // (deliberate best-effort: existing folders are simply skipped)
                }
            }
            // finally copy the requested resource itself (recursively for folders)
            helperCopyResourceToProject(currentUser,online, currentProject, resource);
        } else {
            // no changes on the onlineproject!
            throw new CmsException("[" + this.getClass().getName() + "] " + currentProject.getName(),
                CmsException.C_NO_ACCESS);
        }
    }

    /**
     * Counts the locked resources in this project.
     *
     * <B>Security</B>
     * Only the admin or the owner of the project can do this.
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param id The id of the project
     * @return the amount of locked resources in this project.
* * @exception CmsException Throws CmsException if something goes wrong. */ public int countLockedResources(CmsUser currentUser, CmsProject currentProject, int id) throws CmsException { // read the project. CmsProject project = readProject(currentUser, currentProject, id); // check the security if( isAdmin(currentUser, currentProject) || isManagerOfProject(currentUser, project) || (project.getFlags() == C_PROJECT_STATE_UNLOCKED )) { // count locks return m_dbAccess.countLockedResources(project); } else { throw new CmsException("[" + this.getClass().getName() + "] " + id, CmsException.C_NO_ACCESS); } } public com.opencms.file.genericSql.CmsDbAccess createDbAccess(Configurations configurations) throws CmsException { return new com.opencms.file.genericSql.CmsDbAccess(configurations); } /** * Creates a new file with the given content and resourcetype. <br> * * Files can only be created in an offline project, the state of the new file * is set to NEW (2). <br> * * <B>Security:</B> * Access is granted, if: * <ul> * <li>the user has access to the project</li> * <li>the user can write the resource</li> * <li>the folder-resource is not locked by another user</li> * <li>the file dosn't exists</li> * </ul> * * @param currentUser The user who owns this file. * @param currentGroup The group who owns this file. * @param currentProject The project in which the resource will be used. * @param folder The complete path to the folder in which the new folder will * be created. * @param file The name of the new file (No pathinformation allowed). * @param contents The contents of the new file. * @param type The name of the resourcetype of the new file. * @param propertyinfos A Hashtable of propertyinfos, that should be set for this folder. * The keys for this Hashtable are the names for propertydefinitions, the values are * the values for the propertyinfos. * @return file The created file. * * @exception CmsException Throws CmsException if operation was not succesful. 
*/ public CmsFile createFile(CmsUser currentUser, CmsGroup currentGroup, CmsProject currentProject, String folder, String filename, byte[] contents, String type, Hashtable propertyinfos) throws CmsException { // check for mandatory metainfos checkMandatoryProperties(currentUser, currentProject, type, propertyinfos); // checks, if the filename is valid, if not it throws a exception validFilename(filename); CmsFolder cmsFolder = readFolder(currentUser,currentProject, folder); if( accessCreate(currentUser, currentProject, (CmsResource)cmsFolder) ) { // write-access was granted - create and return the file. CmsFile file = m_dbAccess.createFile(currentUser, currentProject, onlineProject(currentUser, currentProject), folder + filename, 0, cmsFolder.getResourceId(), contents, getResourceType(currentUser, currentProject, type)); // update the access flags Hashtable startSettings=null; Integer accessFlags=null; startSettings=(Hashtable)currentUser.getAdditionalInfo(C_ADDITIONAL_INFO_STARTSETTINGS); if (startSettings != null) { accessFlags=(Integer)startSettings.get(C_START_ACCESSFLAGS); if (accessFlags != null) { file.setAccessFlags(accessFlags.intValue()); } } if(currentGroup != null) { file.setGroupId(currentGroup.getId()); } m_dbAccess.writeFileHeader(currentProject, file,false); m_subresCache.clear(); // write the metainfos m_dbAccess.writeProperties(propertyinfos, file.getResourceId(), file.getType()); // inform about the file-system-change fileSystemChanged(false); return file ; } else { throw new CmsException("[" + this.getClass().getName() + "] " + folder + filename, CmsException.C_NO_ACCESS); } } /** * Creates a new folder. 
* If some mandatory propertydefinitions for the resourcetype are missing, a
     * CmsException will be thrown, because the file cannot be created without
     * the mandatory propertyinformations.<BR/>
     *
     * <B>Security:</B>
     * Access is granted, if:
     * <ul>
     * <li>the user has access to the project</li>
     * <li>the user can write the resource</li>
     * <li>the resource is not locked by another user</li>
     * </ul>
     *
     * @param currentUser The user who requested this method.
     * @param currentGroup The group who requested this method.
     * @param currentProject The current project of the user.
     * @param folder The complete path to the folder in which the new folder will
     * be created.
     * @param newFolderName The name of the new folder (No pathinformation allowed).
     * @param propertyinfos A Hashtable of propertyinfos, that should be set for this folder.
     * The keys for this Hashtable are the names for propertydefinitions, the values are
     * the values for the propertyinfos.
     *
     * @return file The created file.
     *
     * @exception CmsException will be thrown for missing propertyinfos, for worng propertydefs
     * or if the filename is not valid. The CmsException will also be thrown, if the
     * user has not the rights for this resource.
     */
    public CmsFolder createFolder(CmsUser currentUser, CmsGroup currentGroup,
                                  CmsProject currentProject,
                                  String folder, String newFolderName,
                                  Hashtable propertyinfos)
        throws CmsException {

        // check for mandatory metainfos
        checkMandatoryProperties(currentUser, currentProject, C_TYPE_FOLDER_NAME,
                                 propertyinfos);

        // checks, if the filename is valid, if not it throws a exception
        validFilename(newFolderName);
        CmsFolder cmsFolder = readFolder(currentUser,currentProject, folder);
        if( accessCreate(currentUser, currentProject, (CmsResource)cmsFolder) ) {

            // write-acces was granted - create the folder.
            CmsFolder newFolder = m_dbAccess.createFolder(currentUser, currentProject,
                                                          cmsFolder.getResourceId(),
                                                          C_UNKNOWN_ID,
                                                          folder + newFolderName +
                                                          C_FOLDER_SEPERATOR,
                                                          0);
            // update the access flags with the user's personal start settings,
            // if any are stored in the user's additional info
            Hashtable startSettings=null;
            Integer accessFlags=null;
            startSettings=(Hashtable)currentUser.getAdditionalInfo(C_ADDITIONAL_INFO_STARTSETTINGS);
            if (startSettings != null) {
                accessFlags=(Integer)startSettings.get(C_START_ACCESSFLAGS);
                if (accessFlags != null) {
                    newFolder.setAccessFlags(accessFlags.intValue());
                }
            }
            if(currentGroup != null) {
                newFolder.setGroupId(currentGroup.getId());
            }
            newFolder.setState(C_STATE_NEW);
            m_dbAccess.writeFolder(currentProject, newFolder, false);
            m_subresCache.clear();

            // write metainfos for the folder
            m_dbAccess.writeProperties(propertyinfos, newFolder.getResourceId(), newFolder.getType());
            // writeProperties(currentUser,currentProject, newFolder.getAbsolutePath(), propertyinfos);

            // inform about the file-system-change
            fileSystemChanged(true);

            // return the folder
            return newFolder ;
        } else {
            throw new CmsException("[" + this.getClass().getName() + "] " + folder + newFolderName,
                CmsException.C_NO_ACCESS);
        }
    }

    /**
     * Creates a project.
     *
     * <B>Security</B>
     * Only the users which are in the admin or projectleader-group are granted.
     *
     * Changed: added the parent id
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param name The name of the project to read.
     * @param description The description for the new project.
     * @param group the group to be set.
     * @param managergroup the managergroup to be set.
     * @param parentId the parent project
     * @exception CmsException Throws CmsException if something goes wrong.
* @author Martin Langelund */ public CmsProject createProject(CmsUser currentUser, CmsProject currentProject, String name, String description, String groupname, String managergroupname) throws CmsException { if (isAdmin(currentUser, currentProject) || isProjectManager(currentUser, currentProject)) { // read the needed groups from the cms CmsGroup group = readGroup(currentUser, currentProject, groupname); CmsGroup managergroup = readGroup(currentUser, currentProject, managergroupname); // create a new task for the project CmsTask task = createProject(currentUser, name, 1, group.getName(), System.currentTimeMillis(), C_TASK_PRIORITY_NORMAL); return m_dbAccess.createProject(currentUser, group, managergroup, task, name, description, C_PROJECT_STATE_UNLOCKED, C_PROJECT_TYPE_NORMAL); } else { throw new CmsException("[" + this.getClass().getName() + "] " + name, CmsException.C_NO_ACCESS); } } /** * Creates a new project for task handling. * * @param currentUser User who creates the project * @param projectName Name of the project * @param projectType Type of the Project * @param role Usergroup for the project * @param timeout Time when the Project must finished * @param priority Priority for the Project * * @return The new task project * * @exception CmsException Throws CmsException if something goes wrong. 
*/ public CmsTask createProject(CmsUser currentUser, String projectName, int projectType, String roleName, long timeout, int priority) throws CmsException { CmsGroup role = null; // read the role if(roleName!=null && !roleName.equals("")) { role = readGroup(currentUser, null, roleName); } // create the timestamp java.sql.Timestamp timestamp = new java.sql.Timestamp(timeout); java.sql.Timestamp now = new java.sql.Timestamp(System.currentTimeMillis()); return m_dbAccess.createTask(0,0, 1, // standart project type, currentUser.getId(), currentUser.getId(), role.getId(), projectName, now, timestamp, priority); } // Methods working with properties and propertydefinitions /** * Creates the propertydefinition for the resource type.<BR/> * * <B>Security</B> * Only the admin can do this. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param name The name of the propertydefinition to overwrite. * @param resourcetype The name of the resource-type for the propertydefinition. * @param type The type of the propertydefinition (normal|mandatory|optional) * * @exception CmsException Throws CmsException if something goes wrong. 
*/ public CmsPropertydefinition createPropertydefinition(CmsUser currentUser, CmsProject currentProject, String name, String resourcetype, int type) throws CmsException { // check the security if( isAdmin(currentUser, currentProject) ) { // no space before or after the name name = name.trim(); // check the name validName(name, true); m_propertyDefVectorCache.clear(); return( m_dbAccess.createPropertydefinition(name, getResourceType(currentUser, currentProject, resourcetype), type) ); } else { throw new CmsException("[" + this.getClass().getName() + "] " + name, CmsException.C_NO_ACCESS); } } public void createResource(CmsProject project, CmsProject onlineProject, CmsResource resource) throws com.opencms.core.CmsException { m_dbAccess.createResource(project, onlineProject, resource); } /** * Creates a new task. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param projectid The Id of the current project task of the user. * @param agentName User who will edit the task * @param roleName Usergroup for the task * @param taskName Name of the task * @param taskType Type of the task * @param taskComment Description of the task * @param timeout Time when the task must finished * @param priority Id for the priority * * @return A new Task Object * * @exception CmsException Throws CmsException if something goes wrong. 
*/ public CmsTask createTask(CmsUser currentUser, int projectid, String agentName, String roleName, String taskName, String taskComment, int taskType, long timeout, int priority) throws CmsException { CmsUser agent = m_dbAccess.readUser(agentName, C_USER_TYPE_SYSTEMUSER); CmsGroup role = m_dbAccess.readGroup(roleName); java.sql.Timestamp timestamp = new java.sql.Timestamp(timeout); java.sql.Timestamp now = new java.sql.Timestamp(System.currentTimeMillis()); CmsTask task = m_dbAccess.createTask(projectid, projectid, taskType, currentUser.getId(), agent.getId(), role.getId(), taskName, now, timestamp, priority); if(taskComment!=null && !taskComment.equals("")) { m_dbAccess.writeTaskLog(task.getId(), currentUser.getId(), new java.sql.Timestamp(System.currentTimeMillis()), taskComment, C_TASKLOG_USER); } return task; } /** * Creates a new task. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param agent Username who will edit the task * @param role Usergroupname for the task * @param taskname Name of the task * @param taskcomment Description of the task. * @param timeout Time when the task must finished * @param priority Id for the priority * * @return A new Task Object * * @exception CmsException Throws CmsException if something goes wrong. 
*/
    public CmsTask createTask(CmsUser currentUser, CmsProject currentProject,
                              String agentName, String roleName,
                              String taskname, String taskcomment,
                              long timeout, int priority)
        throws CmsException {
        CmsGroup role = m_dbAccess.readGroup(roleName);
        // timestamps for timeout and creation time
        java.sql.Timestamp timestamp = new java.sql.Timestamp(timeout);
        java.sql.Timestamp now = new java.sql.Timestamp(System.currentTimeMillis());
        int agentId = C_UNKNOWN_ID;
        try {
            agentId = m_dbAccess.readUser(agentName, C_USER_TYPE_SYSTEMUSER).getId();
        } catch (Exception e) {
            // ignore that this user doesn't exist and create a task for the role
            // (agentId stays C_UNKNOWN_ID - deliberate best-effort fallback)
        }
        return m_dbAccess.createTask(currentProject.getTaskId(),
                                     currentProject.getTaskId(),
                                     1, // standart Task Type
                                     currentUser.getId(),
                                     agentId,
                                     role.getId(),
                                     taskname, now, timestamp, priority);
    }

    /**
     * Deletes all propertyinformation for a file or folder.
     *
     * <B>Security</B>
     * Only the user is granted, who has the right to write the resource.
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param resource The name of the resource of which the propertyinformations
     * have to be deleted.
     *
     * @exception CmsException Throws CmsException if operation was not succesful
     */
    public void deleteAllProperties(CmsUser currentUser,
                                    CmsProject currentProject,
                                    String resource)
        throws CmsException {

        // read the resource
        CmsResource res = readFileHeader(currentUser,currentProject, resource);

        // check the security
        if( ! accessWrite(currentUser, currentProject, res) ) {
             throw new CmsException("[" + this.getClass().getName() + "] " + resource,
                CmsException.C_NO_ACCESS);
        }
        // are there some mandatory metadefs?
        if(readAllPropertydefinitions(currentUser, currentProject,res.getType(),
                                      C_PROPERTYDEF_TYPE_MANDATORY).size() == 0 ) {
            // no - delete them all
            m_dbAccess.deleteAllProperties(res.getResourceId());
            m_propertyCache.clear();
        } else {
            // yes - throw exception: mandatory properties may not be deleted
            throw new CmsException("[" + this.getClass().getName() + "] " + resource,
                CmsException.C_MANDATORY_PROPERTY);
        }
    }

    /**
     * Deletes a file in the Cms.<br>
     *
     * A file can only be deleteed in an offline project.
     * A file is deleted by setting its state to DELETED (3). <br>
     *
     *
     * <B>Security:</B>
     * Access is granted, if:
     * <ul>
     * <li>the user has access to the project</li>
     * <li>the user can write the resource</li>
     * <li>the resource is locked by the callinUser</li>
     * </ul>
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param filename The complete path of the file.
     *
     * @exception CmsException Throws CmsException if operation was not succesful.
     */
    public void deleteFile(CmsUser currentUser, CmsProject currentProject,
                           String filename)
        throws CmsException {

        // read the file
        CmsResource onlineFile;
        CmsResource file = readFileHeader(currentUser,currentProject, filename);
        try {
            onlineFile = readFileHeader(currentUser,onlineProject(currentUser, currentProject), filename);
        } catch (CmsException exc) {
            // the file dosent exist in the online project
            onlineFile = null;
        }

        // has the user write-access?
        if( accessWrite(currentUser, currentProject, file) ) {

            // write-acces was granted - delete the file.
            // and the metainfos
            deleteAllProperties(currentUser,currentProject,file.getAbsolutePath());
            if(onlineFile == null) {
                // the onlinefile dosent exist => remove the file realy!
m_dbAccess.removeFile(currentProject.getId(), filename);
            } else {
                // the file exists online - only mark it as DELETED
                m_dbAccess.deleteFile(currentProject, filename);
            }
            // update the cache
            m_resourceCache.remove(C_FILE+currentProject.getId()+filename);
            m_subresCache.clear();
            m_accessCache.clear();

            // inform about the file-system-change
            fileSystemChanged(false);
        } else {
            throw new CmsException("[" + this.getClass().getName() + "] " + filename,
                CmsException.C_NO_ACCESS);
        }
    }

    /**
     * Deletes a folder in the Cms.<br>
     *
     * Only folders in an offline Project can be deleted. A folder is deleted by
     * setting its state to DELETED (3). <br>
     *
     * In its current implmentation, this method can ONLY delete empty folders.
     *
     * <B>Security:</B>
     * Access is granted, if:
     * <ul>
     * <li>the user has access to the project</li>
     * <li>the user can read and write this resource and all subresources</li>
     * <li>the resource is not locked</li>
     * </ul>
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param foldername The complete path of the folder.
     *
     * @exception CmsException Throws CmsException if operation was not succesful.
     */
    public void deleteFolder(CmsUser currentUser, CmsProject currentProject,
                             String foldername)
        throws CmsException {

        CmsResource onlineFolder;

        // read the folder, that shold be deleted
        CmsFolder cmsFolder = readFolder(currentUser,currentProject,foldername);
        try {
            onlineFolder = readFolder(currentUser,onlineProject(currentUser, currentProject), foldername);
        } catch (CmsException exc) {
            // the folder dosent exist in the online project
            onlineFolder = null;
        }
        // check, if the user may delete the resource
        if( accessWrite(currentUser, currentProject, cmsFolder) ) {

            // write-acces was granted - delete the folder and metainfos.
            deleteAllProperties(currentUser,currentProject, cmsFolder.getAbsolutePath());
            if(onlineFolder == null) {
                // the onlinefile dosent exist => remove the file realy!
                m_dbAccess.removeFolder(cmsFolder);
            } else {
                // the folder exists online - only mark it as DELETED
                m_dbAccess.deleteFolder(currentProject,cmsFolder, false);
            }
            // update cache
            m_resourceCache.remove(C_FOLDER+currentProject.getId()+foldername);
            m_subresCache.clear();
            m_accessCache.clear();
            // inform about the file-system-change
            fileSystemChanged(true);
        } else {
            throw new CmsException("[" + this.getClass().getName() + "] " + foldername,
                CmsException.C_NO_ACCESS);
        }
    }

    /**
     * Delete a group from the Cms.<BR/>
     * Only groups that contain no subgroups can be deleted.
     *
     * Only the admin can do this.<P/>
     *
     * <B>Security:</B>
     * Only users, which are in the group "administrators" are granted.
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param delgroup The name of the group that is to be deleted.
     * @exception CmsException Throws CmsException if operation was not succesfull.
     */
    public void deleteGroup(CmsUser currentUser, CmsProject currentProject,
                            String delgroup)
        throws CmsException {
        // Check the security
        if( isAdmin(currentUser, currentProject) ) {
            Vector childs=null;
            Vector users=null;
            // get all child groups of the group
            childs=getChild(currentUser,currentProject,delgroup);
            // get all users in this group
            users=getUsersOfGroup(currentUser,currentProject,delgroup);
            // delete group only if it has no childs and there are no users in this group.
            if ((childs == null) && ((users == null) || (users.size() == 0))) {
                m_dbAccess.deleteGroup(delgroup);
                m_groupCache.remove(delgroup);
            } else {
                throw new CmsException(delgroup, CmsException.C_GROUP_NOT_EMPTY);
            }
        } else {
            throw new CmsException("[" + this.getClass().getName() + "] " + delgroup,
                CmsException.C_NO_ACCESS);
        }
    }

    /**
     * Deletes a project.
     *
     * <B>Security</B>
     * Only the admin or the owner of the project can do this.
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param id The id of the project to be published.
*
     * @exception CmsException Throws CmsException if something goes wrong.
     */
    public void deleteProject(CmsUser currentUser, CmsProject currentProject,
                              int id)
        throws CmsException {
        // read the project that should be deleted.
        CmsProject deleteProject = readProject(currentUser, currentProject, id);

        if(isAdmin(currentUser, currentProject) || isManagerOfProject(currentUser, deleteProject)) {
            // delete the project
            m_dbAccess.deleteProject(deleteProject);
            m_projectCache.remove(id);
        } else {
             throw new CmsException("[" + this.getClass().getName() + "] " + id,
                CmsException.C_NO_ACCESS);
        }
    }

    /**
     * Deletes a propertyinformation for a file or folder.
     *
     * <B>Security</B>
     * Only the user is granted, who has the right to write the resource.
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param resource The name of the resource of which the propertyinformation
     * has to be read.
     * @param property The propertydefinition-name of which the propertyinformation has to be set.
     *
     * @exception CmsException Throws CmsException if operation was not succesful
     */
    public void deleteProperty(CmsUser currentUser, CmsProject currentProject,
                               String resource, String property)
        throws CmsException {
        // read the resource
        CmsResource res = readFileHeader(currentUser,currentProject, resource);

        // check the security
        if( ! accessWrite(currentUser, currentProject, res) ) {
             throw new CmsException("[" + this.getClass().getName() + "] " + resource,
                CmsException.C_NO_ACCESS);
        }
        // read the metadefinition
        CmsResourceType resType = getResourceType(currentUser,currentProject,res.getType());
        CmsPropertydefinition metadef = readPropertydefinition(currentUser,currentProject,property, resType.getResourceName());

        // is this a mandatory metadefinition?
        if( (metadef != null) && (metadef.getPropertydefType() != C_PROPERTYDEF_TYPE_MANDATORY ) ) {
            // no - delete the information
            m_dbAccess.deleteProperty(property,res.getResourceId(),res.getType());

            // set the file-state to changed
            if(res.isFile()){
                // NOTE(review): the header is written BEFORE the state is set to
                // CHANGED here, while the folder branch below sets the state
                // first - the state change may not be persisted for files.
                // Confirm whether this ordering is intentional.
                m_dbAccess.writeFileHeader(currentProject, (CmsFile) res, true);
                if (res.getState()==C_STATE_UNCHANGED) {
                    res.setState(C_STATE_CHANGED);
                }
                // update the cache
                m_resourceCache.put(C_FILE+currentProject.getId()+resource,res);
            } else {
                if (res.getState()==C_STATE_UNCHANGED) {
                    res.setState(C_STATE_CHANGED);
                }
                m_dbAccess.writeFolder(currentProject, readFolder(currentUser,currentProject, resource), true);
                // update the cache
                m_resourceCache.put(C_FOLDER+currentProject.getId()+resource,(CmsFolder)res);
            }
            m_subresCache.clear();
            m_propertyCache.clear();
        } else {
            // yes - throw exception: mandatory properties may not be deleted
            throw new CmsException("[" + this.getClass().getName() + "] " + resource,
                CmsException.C_MANDATORY_PROPERTY);
        }
    }

    /**
     * Delete the propertydefinition for the resource type.<BR/>
     *
     * <B>Security</B>
     * Only the admin can do this.
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param name The name of the propertydefinition to read.
     * @param resourcetype The name of the resource type for which the
     * propertydefinition is valid.
     *
     * @exception CmsException Throws CmsException if something goes wrong.
     */
    public void deletePropertydefinition(CmsUser currentUser, CmsProject currentProject,
                                     String name, String resourcetype)
        throws CmsException {
        // check the security
        if( isAdmin(currentUser, currentProject) ) {
            // first read and then delete the metadefinition.
m_propertyDefVectorCache.clear();
            // evict the cached definition before deleting it from the database
            m_propertyDefCache.remove(name + (getResourceType(currentUser,currentProject,resourcetype)).getResourceType());
            m_dbAccess.deletePropertydefinition(
                readPropertydefinition(currentUser,currentProject,name,resourcetype));
        } else {
            throw new CmsException("[" + this.getClass().getName() + "] " + name,
                CmsException.C_NO_ACCESS);
        }
    }

    /**
     * Deletes a user from the Cms.
     *
     * Only a adminstrator can do this.<P/>
     *
     * <B>Security:</B>
     * Only users, which are in the group "administrators" are granted.
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param userId The Id of the user to be deleted.
     *
     * @exception CmsException Throws CmsException if operation was not succesfull.
     */
    public void deleteUser(CmsUser currentUser, CmsProject currentProject,
                           int userId)
        throws CmsException {
        // resolve the id to a name and delegate to the name-based variant
        CmsUser user = readUser(currentUser,currentProject,userId);
        deleteUser(currentUser,currentProject,user.getName());
    }

    /**
     * Deletes a user from the Cms.
     *
     * Only a adminstrator can do this.<P/>
     *
     * <B>Security:</B>
     * Only users, which are in the group "administrators" are granted.
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param name The name of the user to be deleted.
     *
     * @exception CmsException Throws CmsException if operation was not succesfull.
     */
    public void deleteUser(CmsUser currentUser, CmsProject currentProject,
                           String username)
        throws CmsException {
        // Test is this user is existing
        CmsUser user=readUser(currentUser,currentProject,username);

        // Check the security
        // Avoid to delete admin or guest-user
        if( isAdmin(currentUser, currentProject) &&
            !(username.equals(C_USER_ADMIN) || username.equals(C_USER_GUEST))) {
            m_dbAccess.deleteUser(username);
            // delete user from cache
            m_userCache.remove(username+user.getType());
        } else {
            throw new CmsException("[" + this.getClass().getName() + "] " + username,
                CmsException.C_NO_ACCESS);
        }
    }

    /**
     * Deletes a web user from the Cms.
     *
     * NOTE(review): unlike deleteUser, this method performs NO security check
     * before deleting - confirm this is intended for web users.
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param userId The Id of the user to be deleted.
     *
     * @exception CmsException Throws CmsException if operation was not succesfull.
     */
    public void deleteWebUser(CmsUser currentUser, CmsProject currentProject,
                           int userId)
        throws CmsException {
        CmsUser user = readUser(currentUser,currentProject,userId);
        m_dbAccess.deleteUser(user.getName());
        // delete user from cache
        m_userCache.remove(user.getName()+user.getType());
    }

    /**
     * Destroys the resource broker and required modules and connections.
     * @exception CmsException Throws CmsException if something goes wrong.
     */
    public void destroy()
        throws CmsException {
        // destroy the db-access.
        m_dbAccess.destroy();
    }

    /**
     * Ends a task from the Cms.
     *
     * <B>Security:</B>
     * All users are granted.
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param taskid The ID of the task to end.
     *
     * @exception CmsException Throws CmsException if something goes wrong.
*/
    public void endTask(CmsUser currentUser, CmsProject currentProject, int taskid)
        throws CmsException {

        m_dbAccess.endTask(taskid);
        // currentUser may be null (system-initiated end) - log accordingly
        if(currentUser == null) {
            m_dbAccess.writeSystemTaskLog(taskid, "Task finished.");

        } else {
            m_dbAccess.writeSystemTaskLog(taskid,
                                          "Task finished by " +
                                          currentUser.getFirstname() + " " +
                                          currentUser.getLastname() + ".");
        }
    }

    /**
     * Exports cms-resources to zip.
     *
     * <B>Security:</B>
     * only Administrators can do this;
     *
     * @param currentUser user who requestd themethod
     * @param currentProject current project of the user
     * @param exportFile the name (absolute Path) of the export resource (zip)
     * @param exportPath the names (absolute Path) of folders and files which should be exported
     * @param cms the cms-object to use for the export.
     *
     * @exception CmsException Throws CmsException if something goes wrong.
     */
    public void exportResources(CmsUser currentUser,  CmsProject currentProject, String exportFile, String[] exportPaths, CmsObject cms)
        throws CmsException {
        if(isAdmin(currentUser, currentProject)) {
            // the CmsExport constructor performs the export as a side effect
            new CmsExport(exportFile, exportPaths, cms);
        } else {
             throw new CmsException("[" + this.getClass().getName() + "] exportResources",
                CmsException.C_NO_ACCESS);
        }
    }

    /**
     * Exports cms-resources to zip.
     *
     * <B>Security:</B>
     * only Administrators can do this;
     *
     * @param currentUser user who requestd themethod
     * @param currentProject current project of the user
     * @param exportFile the name (absolute Path) of the export resource (zip)
     * @param exportPath the name (absolute Path) of folder from which should be exported
     * @param excludeSystem, decides whether to exclude the system
     * @param excludeUnchanged <code>true</code>, if unchanged files should be excluded.
     * @param cms the cms-object to use for the export.
     *
     * @exception CmsException Throws CmsException if something goes wrong.
     */
    public void exportResources(CmsUser currentUser,  CmsProject currentProject, String exportFile, String[] exportPaths, CmsObject cms, boolean excludeSystem, boolean excludeUnchanged)
        throws CmsException {
        if(isAdmin(currentUser, currentProject)) {
            // the CmsExport constructor performs the export as a side effect
            new CmsExport(exportFile, exportPaths, cms, excludeSystem, excludeUnchanged);
        } else {
             throw new CmsException("[" + this.getClass().getName() + "] exportResources",
                CmsException.C_NO_ACCESS);
        }
    }

    // now private stuff

    /**
     * This method is called, when a resource was changed. Currently it counts the
     * changes.
     */
    protected void fileSystemChanged(boolean folderChanged) {
        // count only the changes - do nothing else!
        // in the future here will maybe a event-story be added
        m_fileSystemChanges++;
        if(folderChanged){
            m_fileSystemFolderChanges++;
        }
    }

    /**
     * Forwards a task to a new user.
     *
     * <B>Security:</B>
     * All users are granted.
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param taskid The Id of the task to forward.
     * @param newRole The new Group for the task
     * @param newUser The new user who gets the task. if its "" the a new agent will automatic selected
     *
     * @exception CmsException Throws CmsException if something goes wrong.
*/
    public void forwardTask(CmsUser currentUser, CmsProject currentProject, int taskid,
                            String newRoleName, String newUserName)
        throws CmsException {

        CmsGroup newRole = m_dbAccess.readGroup(newRoleName);
        CmsUser newUser = null;
        // empty user name => let the db-access layer pick an agent for the role
        if(newUserName.equals("")) {
            newUser = m_dbAccess.readUser(m_dbAccess.findAgent(newRole.getId()));
        } else {
            newUser = m_dbAccess.readUser(newUserName, C_USER_TYPE_SYSTEMUSER);
        }

        m_dbAccess.forwardTask(taskid, newRole.getId(), newUser.getId());
        m_dbAccess.writeSystemTaskLog(taskid,
                                      "Task fowarded from " +
                                      currentUser.getFirstname() + " " +
                                      currentUser.getLastname() + " to " +
                                      newUser.getFirstname() + " " +
                                      newUser.getLastname() + ".");
    }

    /**
     * Returns all projects, which are owned by the user or which are accessible
     * for the group of the user.
     *
     * <B>Security</B>
     * All users are granted.
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     *
     * @return a Vector of projects.
     * @exception CmsException Throws CmsException if something goes wrong.
     */
    public Vector getAllAccessibleProjects(CmsUser currentUser,
                                           CmsProject currentProject)
        throws CmsException {
        // get all groups of the user
        Vector groups = getGroupsOfUser(currentUser, currentProject,
                                        currentUser.getName());

        // get all projects which are owned by the user.
        Vector projects = m_dbAccess.getAllAccessibleProjectsByUser(currentUser);

        // get all projects, that the user can access with his groups.
        for(int i = 0; i < groups.size(); i++) {
            Vector projectsByGroup;
            // is this the admin-group?
            if( ((CmsGroup) groups.elementAt(i)).getName().equals(C_GROUP_ADMIN) ) {
                 // yes - all unlocked projects are accessible for him
                 projectsByGroup = m_dbAccess.getAllProjects(C_PROJECT_STATE_UNLOCKED);
            } else {
                // no - get all projects, which can be accessed by the current group
                projectsByGroup = m_dbAccess.getAllAccessibleProjectsByGroup((CmsGroup) groups.elementAt(i));
            }

            // merge the projects to the vector
            for(int j = 0; j < projectsByGroup.size(); j++) {
                // add only projects, which are new
                if(!projects.contains(projectsByGroup.elementAt(j))) {
                    projects.addElement(projectsByGroup.elementAt(j));
                }
            }
        }
        // return the vector of projects
        return(projects);
    }

    /**
     * Returns all projects, which are owned by the user or which are manageable
     * for the group of the user.
     *
     * <B>Security</B>
     * All users are granted.
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     *
     * @return a Vector of projects.
     * @exception CmsException Throws CmsException if something goes wrong.
     */
    public Vector getAllManageableProjects(CmsUser currentUser,
                                           CmsProject currentProject)
        throws CmsException {
        // get all groups of the user
        Vector groups = getGroupsOfUser(currentUser, currentProject,
                                        currentUser.getName());

        // get all projects which are owned by the user.
        Vector projects = m_dbAccess.getAllAccessibleProjectsByUser(currentUser);

        // get all projects, that the user can manage with his groups.
        for(int i = 0; i < groups.size(); i++) {
            // get all projects, which can be managed by the current group
            Vector projectsByGroup;
            // is this the admin-group?
if( ((CmsGroup) groups.elementAt(i)).getName().equals(C_GROUP_ADMIN) ) { // yes - all unlocked projects are accessible for him projectsByGroup = m_dbAccess.getAllProjects(C_PROJECT_STATE_UNLOCKED); } else { // no - get all projects, which can be accessed by the current group projectsByGroup = m_dbAccess.getAllAccessibleProjectsByManagerGroup((CmsGroup)groups.elementAt(i)); } // merge the projects to the vector for(int j = 0; j < projectsByGroup.size(); j++) { // add only projects, which are new if(!projects.contains(projectsByGroup.elementAt(j))) { projects.addElement(projectsByGroup.elementAt(j)); } } } // remove the online-project, it is not manageable! projects.removeElement(onlineProject(currentUser, currentProject)); // return the vector of projects return(projects); } /** * Returns a Vector with all I_CmsResourceTypes. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * * Returns a Hashtable with all I_CmsResourceTypes. * * @exception CmsException Throws CmsException if operation was not succesful. */ public Hashtable getAllResourceTypes(CmsUser currentUser, CmsProject currentProject) throws CmsException { // check, if the resourceTypes were read bevore if(m_resourceTypes == null) { // read the resourceTypes from the propertys m_resourceTypes = (Hashtable) m_dbAccess.readSystemProperty(C_SYSTEMPROPERTY_RESOURCE_TYPE); // remove the last index. m_resourceTypes.remove(C_TYPE_LAST_INDEX); } // return the resource-types. return(m_resourceTypes); } /** * Returns informations about the cache<P/> * * <B>Security:</B> * All users are granted, except the anonymous user. * * @return A hashtable with informations about the cache. 
 */
public Hashtable getCacheInfo() {
    Hashtable info = new Hashtable();
    // each entry maps a cache name to its current element count (as a String)
    info.put("UserCache",""+m_userCache.size());
    info.put("GroupCache",""+m_groupCache.size());
    info.put("UserGroupCache",""+m_usergroupsCache.size());
    info.put("ResourceCache",""+m_resourceCache.size());
    info.put("SubResourceCache",""+m_subresCache.size());
    info.put("ProjectCache",""+m_projectCache.size());
    info.put("PropertyCache",""+m_propertyCache.size());
    info.put("PropertyDefinitionCache",""+m_propertyDefCache.size());
    info.put("PropertyDefinitionVectorCache",""+m_propertyDefVectorCache.size());
    info.put("AccessCache",""+m_accessCache.size());
    // NOTE(review): m_onlineProjectCache is not reported here - confirm whether intentional
    return info;
}

/**
 * Returns all child groups of a group<P/>
 *
 * <B>Security:</B>
 * All users are granted, except the anonymous user.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param groupname The name of the group.
 * @return groups A Vector of all child groups or null.
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public Vector getChild(CmsUser currentUser, CmsProject currentProject,
                       String groupname)
    throws CmsException {
    // check security: deny the anonymous (guest) user
    if( ! anonymousUser(currentUser, currentProject).equals( currentUser ) ) {
        return m_dbAccess.getChild(groupname);
    } else {
        throw new CmsException("[" + this.getClass().getName() + "] " + groupname,
            CmsException.C_NO_ACCESS);
    }
}

/**
 * Returns all child groups of a group<P/>
 * This method also returns all sub-child groups of the current group
 * (i.e. the full transitive closure, collected recursively).
 *
 * <B>Security:</B>
 * All users are granted, except the anonymous user.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param groupname The name of the group.
 * @return groups A Vector of all child groups or null.
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public Vector getChilds(CmsUser currentUser, CmsProject currentProject,
                        String groupname)
    throws CmsException {
    // check security: deny the anonymous (guest) user
    if( ! anonymousUser(currentUser, currentProject).equals( currentUser ) ) {
        Vector childs=new Vector();
        Vector allChilds=new Vector();
        Vector subchilds=new Vector();
        CmsGroup group=null;

        // get all child groups of the user group
        childs=m_dbAccess.getChild(groupname);
        if (childs!=null) {
            allChilds=childs;
            // now get all subchilds for each group (recursive descent)
            Enumeration enu=childs.elements();
            while (enu.hasMoreElements()) {
                group=(CmsGroup)enu.nextElement();
                subchilds=getChilds(currentUser,currentProject,group.getName());
                // add the subchilds to the already existing groups
                Enumeration enusub=subchilds.elements();
                while (enusub.hasMoreElements()) {
                    group=(CmsGroup)enusub.nextElement();
                    allChilds.addElement(group);
                }
            }
        }
        return allChilds;
    } else {
        throw new CmsException("[" + this.getClass().getName() + "] " + groupname,
            CmsException.C_NO_ACCESS);
    }
}

// Method to access the configuration

/**
 * Method to access the configurations of the properties-file.
 *
 * <B>Security:</B>
 * All users are granted.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @return The Configurations of the properties-file.
 */
public Configurations getConfigurations(CmsUser currentUser, CmsProject currentProject) {
    return m_configuration;
}

/**
 * Returns the list of groups to which the user directly belongs to<P/>
 *
 * <B>Security:</B>
 * All users are granted.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param username The name of the user.
 * @return Vector of groups
 * @exception CmsException Throws CmsException if operation was not successful
 */
public Vector getDirectGroupsOfUser(CmsUser currentUser, CmsProject currentProject,
                                    String username)
    throws CmsException {
    // direct memberships only - no parent-group closure (see getGroupsOfUser)
    return m_dbAccess.getGroupsOfUser(username);
}

/**
 * Returns a Vector with all files of a folder.<br>
 *
 * Files of a folder can be read from an offline Project and the online Project.<br>
 *
 * <B>Security:</B>
 * Access is granted, if:
 * <ul>
 * <li>the user has access to the project</li>
 * <li>the user can read this resource</li>
 * </ul>
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param foldername the complete path to the folder.
 *
 * @return A Vector with all subfiles for the given folder.
 *
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public Vector getFilesInFolder(CmsUser currentUser, CmsProject currentProject,
                               String foldername)
    throws CmsException {
    Vector files;

    // Todo: add caching for getFilesInFolder
    //files=(Vector)m_subresCache.get(C_FILE+currentProject.getId()+foldername);
    //System.err.println("--fof:"+foldername+":"+files);
    //if ((files==null) || (files.size()==0)) {

    // try to get the files in the current project
    try {
        files = helperGetFilesInFolder(currentUser, currentProject, foldername);
    } catch (CmsException e) {
        // if access is denied to the folder, don't try to read the files
        // from the online project either
        if (e.getType() == CmsException.C_ACCESS_DENIED)
            return new Vector(); //an empty vector.
        else
            //can't handle it here.
            throw e;
    }
    if (files == null) {
        //we are not allowed to read the folder (folder deleted)
        return new Vector();
    }
    Vector onlineFiles = null;
    if (!currentProject.equals(onlineProject(currentUser, currentProject))) {
        // this is not the onlineproject, get the files
        // from the onlineproject, too
        try {
            onlineFiles = helperGetFilesInFolder(currentUser,
                onlineProject(currentUser, currentProject), foldername);
            // merge the resources
        } catch (CmsException exc) {
            if (exc.getType() != CmsException.C_ACCESS_DENIED)
                //cant handle it.
                throw exc;
            else
                // access denied online - return the offline files only
                return files;
        }
    }
    if(onlineFiles == null)
        //if it was null, the folder was marked deleted -> no files in online project.
        return files;
    //m_subresCache.put(C_FILE+currentProject.getId()+foldername,files);
    return files = mergeResources(files, onlineFiles);
}

/**
 * Returns a Vector with all resource-names that have set the given property to the given value.
 *
 * <B>Security:</B>
 * All users are granted.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param propertyDefinition the name of the propertydefinition to check.
 * @param propertyValue the value of the property for the resource.
 *
 * @return Vector with all names of resources.
 *
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public Vector getFilesWithProperty(CmsUser currentUser, CmsProject currentProject,
                                   String propertyDefinition, String propertyValue)
    throws CmsException {
    return m_dbAccess.getFilesWithProperty(currentProject.getId(),
        propertyDefinition, propertyValue);
}

/**
 * This method can be called, to determine if the file-system was changed
 * in the past. A module can compare its previously stored number with this
 * returned number. If they differ, a change was made.
 *
 * <B>Security:</B>
 * All users are granted.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 *
 * @return the number of file-system-changes.
 */
public long getFileSystemChanges(CmsUser currentUser, CmsProject currentProject) {
    // plain counter read - incremented elsewhere via fileSystemChanged()
    return m_fileSystemChanges;
}

/**
 * This method can be called, to determine if the folder part of the
 * file-system was changed in the past. A module can compare its previously
 * stored number with this returned number. If they differ, a change was made.
 *
 * <B>Security:</B>
 * All users are granted.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 *
 * @return the number of file-system-changes.
 */
public long getFileSystemFolderChanges(CmsUser currentUser, CmsProject currentProject) {
    return m_fileSystemFolderChanges;
}

/**
 * Returns a Vector with the complete folder-tree for this project.<br>
 *
 * Subfolders can be read from an offline project and the online project. <br>
 *
 * <B>Security:</B>
 * Access is granted, if:
 * <ul>
 * <li>the user has access to the project</li>
 * <li>the user can read this resource</li>
 * </ul>
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 *
 * @return subfolders A Vector with the complete folder-tree for this project.
 *
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public Vector getFolderTree(CmsUser currentUser, CmsProject currentProject)
    throws CmsException {
    Vector resources = m_dbAccess.getFolderTree(currentProject.getId());
    Vector retValue = new Vector(resources.size());
    String lastcheck = "#"; // just a char that is not valid in a filename

    //make sure that we have access to all these.
    for (Enumeration e = resources.elements(); e.hasMoreElements();) {
        CmsResource res = (CmsResource) e.nextElement();
        // skip the whole subtree of the last denied folder:
        // the tree is ordered by path, so children start with the parent's path
        if (!res.getAbsolutePath().startsWith(lastcheck)) {
            if (accessOther(currentUser, currentProject, res, C_ACCESS_PUBLIC_READ + C_ACCESS_PUBLIC_VISIBLE)
                || accessOwner(currentUser, currentProject, res, C_ACCESS_OWNER_READ + C_ACCESS_OWNER_VISIBLE)
                || accessGroup(currentUser, currentProject, res, C_ACCESS_GROUP_READ + C_ACCESS_GROUP_VISIBLE)) {

                retValue.addElement(res);
            } else {
                lastcheck = res.getAbsolutePath();
            }
        }
    }

    return retValue;
}

/**
 * Returns all groups<P/>
 *
 * <B>Security:</B>
 * All users are granted, except the anonymous user.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @return users A Vector of all existing groups.
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public Vector getGroups(CmsUser currentUser, CmsProject currentProject)
    throws CmsException {
    // check security: deny the anonymous (guest) user
    if( ! anonymousUser(currentUser, currentProject).equals( currentUser ) ) {
        return m_dbAccess.getGroups();
    } else {
        throw new CmsException("[" + this.getClass().getName() + "] " + currentUser.getName(),
            CmsException.C_NO_ACCESS);
    }
}

/**
 * Returns a list of groups of a user.<P/>
 *
 * <B>Security:</B>
 * All users are granted.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param username The name of the user.
 * @return Vector of groups
 * @exception CmsException Throws CmsException if operation was not successful
 */
public Vector getGroupsOfUser(CmsUser currentUser, CmsProject currentProject,
                              String username)
    throws CmsException {
    Vector allGroups;

    // try the cache first; result includes all parent groups of the
    // user's direct groups
    allGroups=(Vector)m_usergroupsCache.get(C_USER+username);
    if ((allGroups==null) || (allGroups.size()==0)) {

        CmsGroup subGroup;
        CmsGroup group;
        // get all groups of the user
        Vector groups=m_dbAccess.getGroupsOfUser(username);
        allGroups=groups;
        // now walk up the parent chain of each direct group
        Enumeration enu = groups.elements();
        while (enu.hasMoreElements()) {
            group=(CmsGroup)enu.nextElement();

            subGroup=getParent(currentUser, currentProject,group.getName());
            while(subGroup != null) {
                // is the subGroup already in the vector?
                if(!allGroups.contains(subGroup)) {
                    // no! add it
                    allGroups.addElement(subGroup);
                }
                // read next sub group
                subGroup = getParent(currentUser, currentProject,subGroup.getName());
            }
        }
        m_usergroupsCache.put(C_USER+username,allGroups);
    }
    return allGroups;
}

/**
 * Returns the parent group of a group<P/>
 *
 * <B>Security:</B>
 * All users are granted.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param groupname The name of the group.
 * @return group The parent group or null.
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public CmsGroup getParent(CmsUser currentUser, CmsProject currentProject,
                          String groupname)
    throws CmsException {
    CmsGroup group = readGroup(currentUser, currentProject, groupname);
    // C_UNKNOWN_ID marks a group without a parent
    if (group.getParentId() == C_UNKNOWN_ID) {
        return null;
    }

    // try to read from cache
    CmsGroup parent = (CmsGroup) m_groupCache.get(group.getParentId());
    if (parent == null) {
        parent = m_dbAccess.readGroup(group.getParentId());
        m_groupCache.put(group.getParentId(), parent);
    }
    return parent;
    //return m_dbAccess.getParent(groupname);
}

/**
 * Returns the parent resource of a resource.
 *
 * <B>Security:</B>
 * All users are granted.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param resourcename The name of the resource whose parent is read.
 *
 * @return The parent resource read from the Cms.
 *
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public CmsResource getParentResource(CmsUser currentUser, CmsProject currentProject,
                                     String resourcename)
    throws CmsException {
    // TODO: this can maybe done via the new parent id
    String parentresourceName = readFileHeader(currentUser, currentProject, resourcename).getParent();
    return readFileHeader(currentUser, currentProject, parentresourceName);
}

/**
 * Gets the Registry.<BR/>
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param cms The actual CmsObject
 * @exception CmsException Throws CmsException if access is not allowed.
 */
public I_CmsRegistry getRegistry(CmsUser currentUser, CmsProject currentProject,
                                CmsObject cms)
    throws CmsException {
    // a per-caller clone bound to the given CmsObject
    return m_registry.clone(cms);
}

/**
 * Returns a Vector with the subresources for a folder.<br>
 *
 * <B>Security:</B>
 * Access is granted, if:
 * <ul>
 * <li>the user has access to the project</li>
 * <li>the user can read and view this resource</li>
 * </ul>
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param folder The name of the folder to get the subresources from.
 *
 * @return subfolders A Vector with resources.
 *
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public Vector getResourcesInFolder(CmsUser currentUser, CmsProject currentProject,
                                   String folder)
    throws CmsException {
    CmsFolder onlineFolder = null;
    CmsFolder offlineFolder = null;
    Vector resources = new Vector();
    int resId1, resId2;

    // read the folder in both the online and the offline project;
    // a deleted folder counts as not present
    try {
        onlineFolder = readFolder(currentUser, onlineProject(currentUser, currentProject), folder);
        if (onlineFolder.getState() == C_STATE_DELETED) {
            onlineFolder = null;
        }
    } catch (CmsException exc) {
        // ignore the exception - folder was not found in this project
    }
    try {
        offlineFolder = readFolder(currentUser, currentProject, folder);
        if (offlineFolder.getState() == C_STATE_DELETED) {
            offlineFolder = null;
        }
    } catch (CmsException exc) {
        // ignore the exception - folder was not found in this project
    }
    if ((offlineFolder == null) && (onlineFolder == null)) {
        // the folder is not existent
        throw new CmsException("[" + this.getClass().getName() + "] " + folder,
            CmsException.C_NOT_FOUND);
    } else if (onlineFolder == null) {
        resId1 = offlineFolder.getResourceId();
        resId2 = offlineFolder.getResourceId();
    } else if (offlineFolder == null) {
        resId1 = onlineFolder.getResourceId();
        resId2 = onlineFolder.getResourceId();
    } else {
        resId1 = onlineFolder.getResourceId();
        resId2 = offlineFolder.getResourceId();
    }
    resources = m_dbAccess.getResourcesInFolder(resId1, resId2);
    Vector retValue = new Vector(resources.size());

    //make sure that we have access to all these.
    for (Enumeration e = resources.elements(); e.hasMoreElements();) {
        CmsResource res = (CmsResource) e.nextElement();
        if (accessOther(currentUser, currentProject, res, C_ACCESS_PUBLIC_READ + C_ACCESS_PUBLIC_VISIBLE)
            || accessOwner(currentUser, currentProject, res, C_ACCESS_OWNER_READ + C_ACCESS_OWNER_VISIBLE)
            || accessGroup(currentUser, currentProject, res, C_ACCESS_GROUP_READ + C_ACCESS_GROUP_VISIBLE)) {

            retValue.addElement(res);
        }
    }

    return retValue;
}

/**
 * Returns a CmsResourceType.
 *
 * <B>Security:</B>
 * All users are granted.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param resourceType the id of the resourceType to get.
 *
 * @return the matching CmsResourceType.
 *
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public CmsResourceType getResourceType(CmsUser currentUser, CmsProject currentProject,
                                       int resourceType)
    throws CmsException {
    // try to get the resource-type by linear scan over all registered types
    Hashtable types = getAllResourceTypes(currentUser, currentProject);
    Enumeration keys = types.keys();
    CmsResourceType currentType;
    while(keys.hasMoreElements()) {
        currentType = (CmsResourceType) types.get(keys.nextElement());
        if(currentType.getResourceType() == resourceType) {
            return(currentType);
        }
    }
    // was not found - throw exception
    throw new CmsException("[" + this.getClass().getName() + "] " + resourceType,
        CmsException.C_NOT_FOUND);
}

/**
 * Returns a CmsResourceType.
 *
 * <B>Security:</B>
 * All users are granted.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param resourceType the name of the resource type to get.
 *
 * @return the matching CmsResourceType.
 *
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public CmsResourceType getResourceType(CmsUser currentUser, CmsProject currentProject,
                                       String resourceType)
    throws CmsException {
    // try to get the resource-type by name
    try {
        CmsResourceType type = (CmsResourceType)getAllResourceTypes(currentUser, currentProject).get(resourceType);
        if(type == null) {
            throw new CmsException("[" + this.getClass().getName() + "] " + resourceType,
                CmsException.C_NOT_FOUND);
        }
        return type;
    } catch(NullPointerException exc) {
        // was not found - throw exception
        throw new CmsException("[" + this.getClass().getName() + "] " + resourceType,
            CmsException.C_NOT_FOUND);
    }
}

/**
 * Returns the session storage after a security check.
 *
 * <B>Security:</B>
 * All users except the guest user are granted.
 *
 * @param currentUser The user who requested this method.
 * @param storage The storage of all active users.
 * @return The storage of all active users or null.
 */
public CmsCoreSession getSessionStorage(CmsUser currentUser, CmsCoreSession storage) {
    // guest gets no access to the session storage
    if (currentUser.getName().equals(C_USER_GUEST)) {
        return null;
    } else {
        return storage;
    }
}

/**
 * Returns a Vector with all subfolders.<br>
 *
 * Subfolders can be read from an offline project and the online project. <br>
 *
 * <B>Security:</B>
 * Access is granted, if:
 * <ul>
 * <li>the user has access to the project</li>
 * <li>the user can read this resource</li>
 * </ul>
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param foldername the complete path to the folder.
 *
 * @return subfolders A Vector with all subfolders for the given folder.
 *
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public Vector getSubFolders(CmsUser currentUser, CmsProject currentProject,
                            String foldername)
    throws CmsException {
    Vector folders = new Vector();

    // Todo: add caching for getSubFolders
    //folders=(Vector)m_subresCache.get(C_FOLDER+currentProject.getId()+foldername);
    // NOTE(review): with the cache lookup disabled above, this condition is
    // always true - leftover from the deactivated caching code
    if ((folders==null) || (folders.size()==0)){
        folders=new Vector();
        // try to get the folders in the current project
        try {
            folders = helperGetSubFolders(currentUser, currentProject, foldername);
        } catch (CmsException exc) {
            // no folders, ignoring them
        }
        if( !currentProject.equals(onlineProject(currentUser, currentProject))) {
            // this is not the onlineproject, get the files
            // from the onlineproject, too
            try {
                Vector onlineFolders = helperGetSubFolders(currentUser,
                    onlineProject(currentUser, currentProject), foldername);
                // merge the resources
                folders = mergeResources(folders, onlineFolders);
            } catch(CmsException exc) {
                // no onlinefolders, ignoring them
            }
        }
        //m_subresCache.put(C_FOLDER+currentProject.getId()+foldername,folders);
    }
    // return the folders
    return(folders);
}

/**
 * Get a parameter value for a task.
 *
 * <B>Security:</B>
 * All users are granted.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param taskId The Id of the task.
 * @param parName Name of the parameter.
 *
 * @exception CmsException Throws CmsException if something goes wrong.
 */
public String getTaskPar(CmsUser currentUser, CmsProject currentProject,
                         int taskId, String parName)
    throws CmsException {
    return m_dbAccess.getTaskPar(taskId, parName);
}

/**
 * Get the template task id for a given taskname.
 *
 * @param taskName Name of the Task
 *
 * @return id from the task template
 *
 * @exception CmsException Throws CmsException if something goes wrong.
 */
public int getTaskType(String taskName)
    throws CmsException {
    return m_dbAccess.getTaskType(taskName);
}

/**
 * Returns all users<P/>
 *
 * <B>Security:</B>
 * All users are granted, except the anonymous user.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @return users A Vector of all existing users.
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public Vector getUsers(CmsUser currentUser, CmsProject currentProject)
    throws CmsException {
    // check security: deny the anonymous (guest) user
    if( ! anonymousUser(currentUser, currentProject).equals( currentUser ) ) {
        return m_dbAccess.getUsers(C_USER_TYPE_SYSTEMUSER);
    } else {
        throw new CmsException("[" + this.getClass().getName() + "] " + currentUser.getName(),
            CmsException.C_NO_ACCESS);
    }
}

/**
 * Returns all users from a given type<P/>
 *
 * <B>Security:</B>
 * All users are granted, except the anonymous user.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param type The type of the users.
 * @return users A Vector of all existing users.
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public Vector getUsers(CmsUser currentUser, CmsProject currentProject, int type)
    throws CmsException {
    // check security: deny the anonymous (guest) user
    if( ! anonymousUser(currentUser, currentProject).equals( currentUser ) ) {
        return m_dbAccess.getUsers(type);
    } else {
        throw new CmsException("[" + this.getClass().getName() + "] " + currentUser.getName(),
            CmsException.C_NO_ACCESS);
    }
}

/**
 * Returns all users from a given type that start with a specified string<P/>
 *
 * <B>Security:</B>
 * All users are granted, except the anonymous user.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param type The type of the users.
 * @param namestart The filter for the username
 * @return users A Vector of all existing users.
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public Vector getUsers(CmsUser currentUser, CmsProject currentProject, int type,
                       String namestart)
    throws CmsException {
    // check security: deny the anonymous (guest) user
    if( ! anonymousUser(currentUser, currentProject).equals( currentUser ) ) {
        return m_dbAccess.getUsers(type,namestart);
    } else {
        throw new CmsException("[" + this.getClass().getName() + "] " + currentUser.getName(),
            CmsException.C_NO_ACCESS);
    }
}

/**
 * Returns a list of users in a group.<P/>
 *
 * <B>Security:</B>
 * All users are granted, except the anonymous user.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param groupname The name of the group to list users from.
 * @return Vector of users.
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public Vector getUsersOfGroup(CmsUser currentUser, CmsProject currentProject,
                              String groupname)
    throws CmsException {
    // check the security: deny the anonymous (guest) user
    if( ! anonymousUser(currentUser, currentProject).equals( currentUser ) ) {
        return m_dbAccess.getUsersOfGroup(groupname, C_USER_TYPE_SYSTEMUSER);
    } else {
        throw new CmsException("[" + this.getClass().getName() + "] " + groupname,
            CmsException.C_NO_ACCESS);
    }
}

/**
 * A helper to copy a resource from the online project to a new, specified project.<br>
 *
 * @param currentUser The user who requested this method.
 * @param onlineProject The online project.
 * @param offlineProject The offline project.
 * @param resource The name of the resource.
 * @exception CmsException Throws CmsException if operation was not successful.
 */
protected void helperCopyResourceToProject(CmsUser currentUser,
                                           CmsProject onlineProject,
                                           CmsProject offlineProject,
                                           String resource)
    throws CmsException {
    try {
    // read the online-resource
    CmsResource onlineRes = readFileHeader(currentUser,onlineProject, resource);
    // copy it to the offlineproject
    m_dbAccess.copyResourceToProject(offlineProject, onlineProject,onlineRes);
    // read the offline-resource
    CmsResource offlineRes = readFileHeader(currentUser,offlineProject, resource);
    // copy the metainfos (properties) from the online to the offline resource
    m_dbAccess.writeProperties(readAllProperties(currentUser,onlineProject,onlineRes.getAbsolutePath()),offlineRes.getResourceId(),offlineRes.getType());
    //currentUser,offlineProject,offlineRes.getAbsolutePath(), readAllProperties(currentUser,onlineProject,onlineRes.getAbsolutePath()));
    offlineRes.setState(C_STATE_UNCHANGED);

    if (offlineRes instanceof CmsFolder) {
        m_dbAccess.writeFolder(offlineProject,(CmsFolder)offlineRes,false);
        // update the cache
        m_resourceCache.put(C_FOLDER+offlineProject.getId()+offlineRes.getName(),(CmsFolder)offlineRes);
    } else { //(offlineRes instanceof CmsFile)
        m_dbAccess.writeFileHeader(offlineProject,(CmsFile)offlineRes,false);
        // update the cache
        m_resourceCache.put(C_FILE+offlineProject.getId()+offlineRes.getName(),offlineRes);
    }
    m_subresCache.clear();
    // inform about the file-system-change
    fileSystemChanged(true);
    // now walk recursive through all files and folders, and copy them too
if(onlineRes.isFolder()) { Vector files = getFilesInFolder(currentUser,onlineProject, resource); Vector folders = getSubFolders(currentUser,onlineProject, resource); for(int i = 0; i < folders.size(); i++) { helperCopyResourceToProject(currentUser,onlineProject, offlineProject, ((CmsResource)folders.elementAt(i)).getAbsolutePath()); } for(int i = 0; i < files.size(); i++) { helperCopyResourceToProject(currentUser,onlineProject, offlineProject, ((CmsResource)files.elementAt(i)).getAbsolutePath()); } } } catch (CmsException exc) { exc.printStackTrace(); } } /** * A helper method for this resource-broker. * Returns a Vector with all files of a folder. * The method does not read any files from the parrent folder, * and do also return deleted files. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param foldername the complete path to the folder. * * @return subfiles A Vector with all subfiles for the overgiven folder. * * @exception CmsException Throws CmsException if operation was not succesful. */ protected Vector helperGetFilesInFolder(CmsUser currentUser, CmsProject currentProject, String foldername) throws CmsException { // get the folder CmsFolder cmsFolder = null; try { cmsFolder = readFolder(currentUser,currentProject, currentProject.getId(), foldername); } catch(CmsException exc) { if(exc.getType() == exc.C_NOT_FOUND) { // ignore the exception - file dosen't exist in this project return new Vector(); //just an empty vector. } else { throw exc; } } if (cmsFolder.getState() == I_CmsConstants.C_STATE_DELETED) { //indicate that the folder was found, but deleted, and resources are not avaiable. return null; } Vector _files = m_dbAccess.getFilesInFolder(cmsFolder); Vector files = new Vector(_files.size()); //make sure that we have access to all these. 
for (Enumeration e = _files.elements();e.hasMoreElements();) { CmsFile file = (CmsFile) e.nextElement(); if( accessOther(currentUser, currentProject, (CmsResource)file, C_ACCESS_PUBLIC_READ) || accessOwner(currentUser, currentProject, (CmsResource)file, C_ACCESS_OWNER_READ) || accessGroup(currentUser, currentProject, (CmsResource)file, C_ACCESS_GROUP_READ) ) { files.addElement(file); } } return files; } /** * A helper method for this resource-broker. * Returns a Hashtable with all subfolders.<br> * * Subfolders can be read from an offline project and the online project. <br> * * @param currentUser The user who requested this method. * @param currentProject The current project to read the folders from. * @param foldername the complete path to the folder. * * @return subfolders A Hashtable with all subfolders for the given folder. * * @exception CmsException Throws CmsException if operation was not succesful. */ protected Vector helperGetSubFolders(CmsUser currentUser, CmsProject currentProject, String foldername) throws CmsException{ CmsFolder cmsFolder = readFolder(currentUser,currentProject,currentProject.getId(),foldername); if( accessRead(currentUser, currentProject, (CmsResource)cmsFolder) ) { // acces to all subfolders was granted - return the sub-folders. 
Vector folders = m_dbAccess.getSubFolders(cmsFolder); CmsFolder folder; for(int z=0 ; z < folders.size() ; z++) { // read the current folder folder = (CmsFolder)folders.elementAt(z); // check the readability for the folder if( !( accessOther(currentUser, currentProject, (CmsResource)folder, C_ACCESS_PUBLIC_READ) || accessOwner(currentUser, currentProject, (CmsResource)folder, C_ACCESS_OWNER_READ) || accessGroup(currentUser, currentProject, (CmsResource)folder, C_ACCESS_GROUP_READ) ) ) { // access to the folder was not granted delete him folders.removeElementAt(z); // correct the index z } } return folders; } else { throw new CmsException("[" + this.getClass().getName() + "] " + foldername, CmsException.C_ACCESS_DENIED); } } /** * Imports a import-resource (folder or zipfile) to the cms. * * <B>Security:</B> * only Administrators can do this; * * @param currentUser user who requestd themethod * @param currentProject current project of the user * @param importFile the name (absolute Path) of the import resource (zip or folder) * @param importPath the name (absolute Path) of folder in which should be imported * @param cms the cms-object to use for the import. * * @exception Throws CmsException if something goes wrong. */ public void importFolder(CmsUser currentUser, CmsProject currentProject, String importFile, String importPath, CmsObject cms) throws CmsException { if(isAdmin(currentUser, currentProject)) { new CmsImportFolder(importFile, importPath, cms); } else { throw new CmsException("[" + this.getClass().getName() + "] importResources", CmsException.C_NO_ACCESS); } } // Methods working with database import and export /** * Imports a import-resource (folder or zipfile) to the cms. 
* * <B>Security:</B> * only Administrators can do this; * * @param currentUser user who requestd themethod * @param currentProject current project of the user * @param importFile the name (absolute Path) of the import resource (zip or folder) * @param importPath the name (absolute Path) of folder in which should be imported * @param cms the cms-object to use for the import. * * @exception Throws CmsException if something goes wrong. */ public void importResources(CmsUser currentUser, CmsProject currentProject, String importFile, String importPath, CmsObject cms) throws CmsException { if(isAdmin(currentUser, currentProject)) { CmsImport imp = new CmsImport(importFile, importPath, cms); imp.importResources(); } else { throw new CmsException("[" + this.getClass().getName() + "] importResources", CmsException.C_NO_ACCESS); } } // Internal ResourceBroker methods /** * Initializes the resource broker and sets up all required modules and connections. * @param config The OpenCms configuration. * @exception CmsException Throws CmsException if something goes wrong. */ public void init(Configurations config) throws CmsException { // Store the configuration. m_configuration = config; if (config.getString("publishproject.delete", "false").toLowerCase().equals("true")) { m_deletePublishedProject = true; } // initialize the access-module. 
if(A_OpenCms.isLogging()) { A_OpenCms.log(I_CmsLogChannels.C_OPENCMS_INIT, "[CmsResourceBroker] init the dbaccess-module."); } m_dbAccess = createDbAccess(config); // initalize the caches m_userCache=new CmsCache(config.getInteger(C_CONFIGURATION_CACHE + ".user", 50)); m_groupCache = new CmsCache(config.getInteger(C_CONFIGURATION_CACHE + ".group", 50)); m_usergroupsCache = new CmsCache(config.getInteger(C_CONFIGURATION_CACHE + ".usergroups", 50)); m_projectCache = new CmsCache(config.getInteger(C_CONFIGURATION_CACHE + ".project", 50)); m_onlineProjectCache = new CmsCache(config.getInteger(C_CONFIGURATION_CACHE + ".onlineproject", 50)); m_resourceCache=new CmsCache(config.getInteger(C_CONFIGURATION_CACHE + ".resource", 1000)); m_subresCache = new CmsCache(config.getInteger(C_CONFIGURATION_CACHE + ".subres", 100)); m_propertyCache = new CmsCache(config.getInteger(C_CONFIGURATION_CACHE + ".property", 1000)); m_propertyDefCache = new CmsCache(config.getInteger(C_CONFIGURATION_CACHE + ".propertydef", 100)); m_propertyDefVectorCache = new CmsCache(config.getInteger(C_CONFIGURATION_CACHE + ".propertyvectordef", 100)); m_accessCache = new CmsCache(config.getInteger(C_CONFIGURATION_CACHE + ".access", 1000)); m_cachelimit = config.getInteger(C_CONFIGURATION_CACHE + ".maxsize", 20000); m_refresh=config.getString(C_CONFIGURATION_CACHE + ".refresh", ""); // initialize the registry if(A_OpenCms.isLogging()) { A_OpenCms.log(I_CmsLogChannels.C_OPENCMS_INIT, "[CmsResourceBroker] init registry."); } try { m_registry= new CmsRegistry(config.getString(C_CONFIGURATION_REGISTRY)); } catch (CmsException ex) { throw ex; } catch(Exception ex) { // init of registry failed - throw exception throw new CmsException("Init of registry failed", CmsException.C_REGISTRY_ERROR, ex); } } /** * Determines, if the users current group is the admin-group. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. 
* @param currentProject The current project of the user. * @return true, if the users current group is the admin-group, * else it returns false. * @exception CmsException Throws CmsException if operation was not succesful. */ public boolean isAdmin(CmsUser currentUser, CmsProject currentProject) throws CmsException { return userInGroup(currentUser, currentProject,currentUser.getName(), C_GROUP_ADMIN); } /** * Determines, if the users may manage a project.<BR/> * Only the manager of a project may publish it. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @return true, if the may manage this project. * @exception CmsException Throws CmsException if operation was not succesful. */ public boolean isManagerOfProject(CmsUser currentUser, CmsProject currentProject) throws CmsException { // is the user owner of the project? if( currentUser.getId() == currentProject.getOwnerId() ) { // YES return true; } // get all groups of the user Vector groups = getGroupsOfUser(currentUser, currentProject, currentUser.getName()); for(int i = 0; i < groups.size(); i++) { // is this a managergroup for this project? if( ((CmsGroup)groups.elementAt(i)).getId() == currentProject.getManagerGroupId() ) { // this group is manager of the project return true; } } // this user is not manager of this project return false; } /** * Determines, if the users current group is the projectleader-group.<BR/> * All projectleaders can create new projects, or close their own projects. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @return true, if the users current group is the projectleader-group, * else it returns false. * @exception CmsException Throws CmsException if operation was not succesful. 
*/ public boolean isProjectManager(CmsUser currentUser, CmsProject currentProject) throws CmsException { return userInGroup(currentUser, currentProject,currentUser.getName(), C_GROUP_PROJECTLEADER); } /** * Returns the user, who had locked the resource.<BR/> * * A user can lock a resource, so he is the only one who can write this * resource. This methods checks, if a resource was locked. * * @param user The user who wants to lock the file. * @param project The project in which the resource will be used. * @param resource The resource. * * @return the user, who had locked the resource. * * @exception CmsException will be thrown, if the user has not the rights * for this resource. */ public CmsUser lockedBy(CmsUser currentUser, CmsProject currentProject, CmsResource resource) throws CmsException { return readUser(currentUser,currentProject,resource.isLockedBy() ) ; } /** * Returns the user, who had locked the resource.<BR/> * * A user can lock a resource, so he is the only one who can write this * resource. This methods checks, if a resource was locked. * * @param user The user who wants to lock the file. * @param project The project in which the resource will be used. * @param resource The complete path to the resource. * * @return the user, who had locked the resource. * * @exception CmsException will be thrown, if the user has not the rights * for this resource. */ public CmsUser lockedBy(CmsUser currentUser, CmsProject currentProject, String resource) throws CmsException { return readUser(currentUser,currentProject,readFileHeader(currentUser, currentProject, resource).isLockedBy() ) ; } /** * Locks a resource.<br> * * Only a resource in an offline project can be locked. The state of the resource * is set to CHANGED (1). * If the content of this resource is not exisiting in the offline project already, * it is read from the online project and written into the offline project. * A user can lock a resource, so he is the only one who can write this * resource. 
<br> * * <B>Security:</B> * Access is granted, if: * <ul> * <li>the user has access to the project</li> * <li>the user can write the resource</li> * <li>the resource is not locked by another user</li> * </ul> * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param resource The complete path to the resource to lock. * @param force If force is true, a existing locking will be oberwritten. * * @exception CmsException Throws CmsException if operation was not succesful. * It will also be thrown, if there is a existing lock * and force was set to false. */ public void lockResource(CmsUser currentUser, CmsProject currentProject, String resourcename, boolean force) throws CmsException { CmsResource cmsResource=null; // read the resource, that shold be locked if (resourcename.endsWith("/")) { cmsResource = (CmsFolder)readFolder(currentUser,currentProject,resourcename); } else { cmsResource = (CmsFile)readFileHeader(currentUser,currentProject,resourcename); } // Can't lock what isn't there if (cmsResource == null) throw new CmsException(CmsException.C_NOT_FOUND); // check, if the resource is in the offline-project if(cmsResource.getProjectId() != currentProject.getId()) { // the resource is not in the current project and can't be locked - so ignore. 
return; } // check, if the user may lock the resource if( accessLock(currentUser, currentProject, cmsResource) ) { if(cmsResource.isLocked()) { //if (cmsResource.isLockedBy()!=currentUser.getId()) { // if the force switch is not set, throw an exception if (force==false) { throw new CmsException("["+this.getClass().getName()+"] "+resourcename,CmsException.C_LOCKED); } } // lock the resouece cmsResource.setLocked(currentUser.getId()); //update resource m_dbAccess.updateLockstate(cmsResource); // update the cache if (resourcename.endsWith("/")) { //m_dbAccess.writeFolder(currentProject,(CmsFolder)cmsResource,false); m_resourceCache.put(C_FOLDER+currentProject.getId()+resourcename,(CmsFolder)cmsResource); } else { //m_dbAccess.writeFileHeader(currentProject,onlineProject(currentUser, currentProject),(CmsFile)cmsResource,false); m_resourceCache.put(C_FILE+currentProject.getId()+resourcename,(CmsFile)cmsResource); } m_subresCache.clear(); // if this resource is a folder -> lock all subresources, too if(cmsResource.isFolder()) { Vector files = getFilesInFolder(currentUser,currentProject, cmsResource.getAbsolutePath()); Vector folders = getSubFolders(currentUser,currentProject, cmsResource.getAbsolutePath()); CmsResource currentResource; // lock all files in this folder for(int i = 0; i < files.size(); i++ ) { currentResource = (CmsResource)files.elementAt(i); if (currentResource.getState() != C_STATE_DELETED) { lockResource(currentUser, currentProject, currentResource.getAbsolutePath(), true); } } // lock all files in this folder for(int i = 0; i < folders.size(); i++) { currentResource = (CmsResource)folders.elementAt(i); if (currentResource.getState() != C_STATE_DELETED) { lockResource(currentUser, currentProject, currentResource.getAbsolutePath(), true); } } } } else { throw new CmsException("[" + this.getClass().getName() + "] " + resourcename, CmsException.C_NO_ACCESS); } } // Methods working with user and groups /** * Logs a user into the Cms, if the password is 
correct. * * <B>Security</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param username The name of the user to be returned. * @param password The password of the user to be returned. * @return the logged in user. * * @exception CmsException Throws CmsException if operation was not succesful */ public CmsUser loginUser(CmsUser currentUser, CmsProject currentProject, String username, String password) throws CmsException { CmsUser newUser = readUser(currentUser, currentProject, username, password); // is the user enabled? if( newUser.getFlags() == C_FLAG_ENABLED ) { // Yes - log him in! // first write the lastlogin-time. newUser.setLastlogin(new Date().getTime()); // write the user back to the cms. m_dbAccess.writeUser(newUser); // update cache m_userCache.put(newUser.getName()+newUser.getType(),newUser); return(newUser); } else { // No Access! throw new CmsException("[" + this.getClass().getName() + "] " + username, CmsException.C_NO_ACCESS ); } } /** * Logs a web user into the Cms, if the password is correct. * * <B>Security</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param username The name of the user to be returned. * @param password The password of the user to be returned. * @return the logged in user. * * @exception CmsException Throws CmsException if operation was not succesful */ public CmsUser loginWebUser(CmsUser currentUser, CmsProject currentProject, String username, String password) throws CmsException { CmsUser newUser = readWebUser(currentUser, currentProject, username, password); // is the user enabled? if( newUser.getFlags() == C_FLAG_ENABLED ) { // Yes - log him in! // first write the lastlogin-time. newUser.setLastlogin(new Date().getTime()); // write the user back to the cms. 
m_dbAccess.writeUser(newUser); // update cache m_userCache.put(newUser.getName()+newUser.getType(),newUser); return(newUser); } else { // No Access! throw new CmsException("[" + this.getClass().getName() + "] " + username, CmsException.C_NO_ACCESS ); } } /** * Merges two resource-vectors into one vector. * All offline-resources will be putted to the return-vector. All additional * online-resources will be putted to the return-vector, too. All online resources, * which are present in the offline-vector will be ignored. * * * @param offline The vector with the offline resources. * @param online The vector with the online resources. * @return The merged vector. */ protected Vector mergeResources(Vector offline, Vector online) { //dont do anything if any of the given vectors are empty or null. if ((offline == null) || (offline.size() == 0)) return (online!=null)?online:new Vector(); if ((online == null) || (online.size() == 0)) return (offline!=null)?offline:new Vector(); // create a vector for the merged offline //remove all objects in the online vector that are present in the offline vector. for (Enumeration e=offline.elements();e.hasMoreElements();) { CmsResource cr = (CmsResource) e.nextElement(); Resource r = new Resource(cr.getAbsolutePath()); online.removeElement(r); } //merge the two vectors. If both vectors were sorted, the mereged vector will remain sorted. 
Vector merged = new Vector(offline.size() + online.size()); int offIndex = 0; int onIndex = 0; while ((offIndex < offline.size()) || (onIndex < online.size())) { if (offIndex >= offline.size()) { merged.addElement(online.elementAt(onIndex++)); continue; } if (onIndex >= online.size()) { merged.addElement(offline.elementAt(offIndex++)); continue; } String on = ((CmsResource)online.elementAt(onIndex)).getAbsolutePath(); String off = ((CmsResource)offline.elementAt(offIndex)).getAbsolutePath(); if (on.compareTo(off) < 0) merged.addElement(online.elementAt(onIndex++)); else merged.addElement(offline.elementAt(offIndex++)); } return(merged); } /** * Moves the file. * * This operation includes a copy and a delete operation. These operations * are done with their security-checks. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param source The complete path of the sourcefile. * @param destination The complete path of the destinationfile. * * @exception CmsException will be thrown, if the file couldn't be moved. * The CmsException will also be thrown, if the user has not the rights * for this resource. */ public void moveFile(CmsUser currentUser, CmsProject currentProject, String source, String destination) throws CmsException { // read the file to check access CmsResource file = readFileHeader(currentUser,currentProject, source); // has the user write-access? if (accessWrite(currentUser, currentProject, file)) { // first copy the file, this may ends with an exception copyFile(currentUser, currentProject, source, destination); // then delete the source-file, this may end with an exception // => the file was only copied, not moved! deleteFile(currentUser, currentProject, source); // inform about the file-system-change fileSystemChanged(file.isFolder()); } else { throw new CmsException("[" + this.getClass().getName() + "] " + source, CmsException.C_NO_ACCESS); } } /** * Returns the onlineproject. 
All anonymous * (CmsUser callingUser, or guest) users will see the resources of this project. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @return the onlineproject object. * @exception CmsException Throws CmsException if something goes wrong. */ public CmsProject onlineProject(CmsUser currentUser, CmsProject currentProject) throws CmsException { CmsProject project = null; // try to get the online project for this offline project from cache project = (CmsProject) m_onlineProjectCache.get(currentProject.getId()); if (project == null) { // the project was not in the cache // lookup the currentProject in the CMS_SITE_PROJECT table, and in the same call return it. project = m_dbAccess.getOnlineProject(currentProject.getId()); // store the project into the cache m_onlineProjectCache.put(currentProject.getId(), project); } return project; } /** * Publishes a project. * * <B>Security</B> * Only the admin or the owner of the project can do this. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param id The id of the project to be published. * @return a vector of changed resources. * * @exception CmsException Throws CmsException if something goes wrong. */ public void publishProject(CmsUser currentUser, CmsProject currentProject, int id) throws CmsException { CmsProject publishProject = readProject(currentUser, currentProject, id); // check the security if ((isAdmin(currentUser, currentProject) || isManagerOfProject(currentUser, publishProject)) && (publishProject.getFlags() == C_PROJECT_STATE_UNLOCKED)) { m_dbAccess.publishProject(currentUser, id, onlineProject(currentUser, currentProject)); m_subresCache.clear(); // inform about the file-system-change fileSystemChanged(true); // the project-state will be set to "published", the date will be set. 
// the project must be written to the cms. CmsProject project = readProject(currentUser, currentProject, id); project.setFlags(C_PROJECT_STATE_ARCHIVE); project.setPublishingDate(new Date().getTime()); project.setPublishedBy(currentUser.getId()); m_dbAccess.writeProject(project); m_projectCache.put(project.getId(), project); // finally set the refrish signal to another server if nescessary if (m_refresh.length() > 0) { try { URL url = new URL(m_refresh); URLConnection con = url.openConnection(); con.connect(); InputStream in = con.getInputStream(); in.close(); System.err.println(in.toString()); } catch (Exception ex) { throw new CmsException(0, ex); } } // HACK: now currently we can delete the project to decrease the amount of data in the db if (m_deletePublishedProject) { deleteProject(currentUser, currentProject, id); } } else { throw new CmsException("[" + this.getClass().getName() + "] could not publish project " + id, CmsException.C_NO_ACCESS); } } /** * Reads the agent of a task from the OpenCms. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param task The task to read the agent from. * @return The owner of a task. * * @exception CmsException Throws CmsException if operation was not succesful. */ public CmsUser readAgent(CmsUser currentUser, CmsProject currentProject, CmsTask task) throws CmsException { return readUser(currentUser,currentProject,task.getAgentUser()); } /** * Reads all file headers of a file in the OpenCms.<BR> * This method returns a vector with the histroy of all file headers, i.e. * the file headers of a file, independent of the project they were attached to.<br> * * The reading excludes the filecontent. * * <B>Security:</B> * Access is granted, if: * <ul> * <li>the user can read the resource</li> * </ul> * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. 
* @param filename The name of the file to be read. * * @return Vector of file headers read from the Cms. * * @exception CmsException Throws CmsException if operation was not succesful. */ public Vector readAllFileHeaders(CmsUser currentUser, CmsProject currentProject, String filename) throws CmsException { CmsResource cmsFile = readFileHeader(currentUser,currentProject, filename); if( accessRead(currentUser, currentProject, cmsFile) ) { // acces to all subfolders was granted - return the file-history. return(m_dbAccess.readAllFileHeaders(filename)); } else { throw new CmsException("[" + this.getClass().getName() + "] " + filename, CmsException.C_ACCESS_DENIED); } } /** * Returns a list of all propertyinformations of a file or folder. * * <B>Security</B> * Only the user is granted, who has the right to view the resource. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param resource The name of the resource of which the propertyinformation has to be * read. * * @return Vector of propertyinformation as Strings. * * @exception CmsException Throws CmsException if operation was not succesful */ public Hashtable readAllProperties(CmsUser currentUser, CmsProject currentProject, String resource) throws CmsException { CmsResource res; // read the resource from the currentProject, or the online-project try { res = readFileHeader(currentUser,currentProject, resource); } catch(CmsException exc) { // the resource was not readable if(currentProject.equals(onlineProject(currentUser, currentProject))) { // this IS the onlineproject - throw the exception throw exc; } else { // try to read the resource in the onlineproject res = readFileHeader(currentUser,onlineProject(currentUser, currentProject), resource); } } // check the security if( ! 
accessRead(currentUser, currentProject, res) ) { throw new CmsException("[" + this.getClass().getName() + "] " + resource, CmsException.C_NO_ACCESS); } Hashtable returnValue = null; returnValue = (Hashtable)m_propertyCache.get(Integer.toString(res.getResourceId()) +"_"+ Integer.toString(res.getType())); if (returnValue == null){ returnValue = m_dbAccess.readAllProperties(res.getResourceId(),res.getType()); m_propertyCache.put(Integer.toString(res.getResourceId()) +"_"+ Integer.toString(res.getType()),returnValue); } return returnValue; } /** * Reads all propertydefinitions for the given resource type. * * <B>Security</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param id The id the resource type to read the propertydefinitions for. * @param type The type of the propertydefinition (normal|mandatory|optional). * * @return propertydefinitions A Vector with propertydefefinitions for the resource type. * The Vector is maybe empty. * * @exception CmsException Throws CmsException if something goes wrong. */ public Vector readAllPropertydefinitions(CmsUser currentUser, CmsProject currentProject, int id, int type) throws CmsException { Vector returnValue = null; returnValue = (Vector) m_propertyDefVectorCache.get(Integer.toString(id) + "_" + Integer.toString(type)); if (returnValue == null){ returnValue = m_dbAccess.readAllPropertydefinitions(id,type); m_propertyDefVectorCache.put(Integer.toString(id) + "_" + Integer.toString(type), returnValue); } return returnValue; } /** * Reads all propertydefinitions for the given resource type. * * <B>Security</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param resourcetype The name of the resource type to read the propertydefinitions for. * * @return propertydefinitions A Vector with propertydefefinitions for the resource type. 
* The Vector is maybe empty. * * @exception CmsException Throws CmsException if something goes wrong. */ public Vector readAllPropertydefinitions(CmsUser currentUser, CmsProject currentProject, String resourcetype) throws CmsException { Vector returnValue = null; CmsResourceType resType = getResourceType(currentUser, currentProject, resourcetype); returnValue = (Vector)m_propertyDefVectorCache.get(resType.getResourceName()); if (returnValue == null){ returnValue = m_dbAccess.readAllPropertydefinitions(resType); m_propertyDefVectorCache.put(resType.getResourceName(), returnValue); } return returnValue; } /** * Reads all propertydefinitions for the given resource type. * * <B>Security</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param resourcetype The name the resource type to read the propertydefinitions for. * @param type The type of the propertydefinition (normal|mandatory|optional). * * @return propertydefinitions A Vector with propertydefefinitions for the resource type. * The Vector is maybe empty. * * @exception CmsException Throws CmsException if something goes wrong. */ public Vector readAllPropertydefinitions(CmsUser currentUser, CmsProject currentProject, String resourcetype, int type) throws CmsException { CmsResourceType restype=getResourceType(currentUser,currentProject,resourcetype); return readAllPropertydefinitions(currentUser, currentProject, restype.getResourceType(),type); } // Methods working with system properties /** * Reads the export-path for the system. * This path is used for db-export and db-import. * * <B>Security:</B> * All users are granted.<BR/> * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @return the exportpath. 
*/ public String readExportPath(CmsUser currentUser, CmsProject currentProject) throws CmsException { return (String) m_dbAccess.readSystemProperty(C_SYSTEMPROPERTY_EXPORTPATH); } /** * Reads a file from a previous project of the Cms.<BR/> * * <B>Security:</B> * Access is granted, if: * <ul> * <li>the user has access to the project</li> * <li>the user can read the resource</li> * </ul> * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param projectId The id of the project to read the file from. * @param filename The name of the file to be read. * * @return The file read from the Cms. * * @exception CmsException Throws CmsException if operation was not succesful. * */ public CmsFile readFile(CmsUser currentUser, CmsProject currentProject, int projectId, String filename) throws CmsException { CmsFile cmsFile = null; // read the resource from the projectId, try { //cmsFile=(CmsFile)m_resourceCache.get(C_FILECONTENT+projectId+filename); if (cmsFile == null) { cmsFile = m_dbAccess.readFile(projectId, onlineProject(currentUser, currentProject).getId(), filename); // only put it in thecache until the size is below the max site /*if (cmsFile.getContents().length <m_cachelimit) { m_resourceCache.put(C_FILECONTENT+projectId+filename,cmsFile); } else { }*/ } if (accessRead(currentUser, currentProject, (CmsResource) cmsFile)) { // acces to all subfolders was granted - return the file. return cmsFile; } else { throw new CmsException("[" + this.getClass().getName() + "] " + filename, CmsException.C_ACCESS_DENIED); } } catch (CmsException exc) { throw exc; } } /** * Reads a file from the Cms.<BR/> * * <B>Security:</B> * Access is granted, if: * <ul> * <li>the user has access to the project</li> * <li>the user can read the resource</li> * </ul> * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param filename The name of the file to be read. 
* * @return The file read from the Cms. * * @exception CmsException Throws CmsException if operation was not succesful. * */ public CmsFile readFile(CmsUser currentUser, CmsProject currentProject, String filename) throws CmsException { CmsFile cmsFile = null; // read the resource from the currentProject, or the online-project try { //cmsFile=(CmsFile)m_resourceCache.get(C_FILECONTENT+currentProject.getId()+filename); if (cmsFile == null) { cmsFile = m_dbAccess.readFile(currentProject.getId(), onlineProject(currentUser, currentProject).getId(), filename); // only put it in thecache until the size is below the max site /*if (cmsFile.getContents().length <m_cachelimit) { m_resourceCache.put(C_FILECONTENT+currentProject.getId()+filename,cmsFile); } else { }*/ } } catch (CmsException exc) { // the resource was not readable if (exc.getType() == CmsException.C_RESOURCE_DELETED) { //resource deleted throw exc; } else if (currentProject.equals(onlineProject(currentUser, currentProject))) { // this IS the onlineproject - throw the exception throw exc; } else { // try to read the resource in the onlineproject cmsFile = m_dbAccess.readFile(onlineProject(currentUser, currentProject).getId(), onlineProject(currentUser, currentProject).getId(), filename); } } if (accessRead(currentUser, currentProject, (CmsResource) cmsFile)) { // acces to all subfolders was granted - return the file. 
return cmsFile; } else { throw new CmsException("[" + this.getClass().getName() + "] " + filename, CmsException.C_ACCESS_DENIED); } } /** * Gets the known file extensions (=suffixes) * * <B>Security:</B> * All users are granted access<BR/> * * @param currentUser The user who requested this method, not used here * @param currentProject The current project of the user, not used here * * @return Hashtable with file extensions as Strings */ public Hashtable readFileExtensions(CmsUser currentUser, CmsProject currentProject) throws CmsException { Hashtable res=(Hashtable) m_dbAccess.readSystemProperty(C_SYSTEMPROPERTY_EXTENSIONS); return ( (res!=null)? res : new Hashtable()); } /** * Reads a file header a previous project of the Cms.<BR/> * The reading excludes the filecontent. <br> * * A file header can be read from an offline project or the online project. * * <B>Security:</B> * Access is granted, if: * <ul> * <li>the user has access to the project</li> * <li>the user can read the resource</li> * </ul> * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param projectId The id of the project to read the file from. * @param filename The name of the file to be read. * * @return The file read from the Cms. * * @exception CmsException Throws CmsException if operation was not succesful. */ public CmsResource readFileHeader(CmsUser currentUser, CmsProject currentProject, int projectId, String filename) throws CmsException { CmsResource cmsFile; // read the resource from the currentProject, or the online-project try { cmsFile=(CmsResource)m_resourceCache.get(C_FILE+projectId+filename); if (cmsFile==null) { cmsFile = m_dbAccess.readFileHeader(projectId, filename); m_resourceCache.put(C_FILE+projectId+filename,cmsFile); } if( accessRead(currentUser, currentProject, cmsFile) ) { // acces to all subfolders was granted - return the file-header. 
return cmsFile; } else { throw new CmsException("[" + this.getClass().getName() + "] " + filename, CmsException.C_ACCESS_DENIED); } } catch(CmsException exc) { throw exc; } } /** * Reads a file header from the Cms.<BR/> * The reading excludes the filecontent. <br> * * A file header can be read from an offline project or the online project. * * <B>Security:</B> * Access is granted, if: * <ul> * <li>the user has access to the project</li> * <li>the user can read the resource</li> * </ul> * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param filename The name of the file to be read. * * @return The file read from the Cms. * * @exception CmsException Throws CmsException if operation was not succesful. */ public CmsResource readFileHeader(CmsUser currentUser, CmsProject currentProject, String filename) throws CmsException { CmsResource cmsFile; // check if this method is misused to read a folder if (filename.endsWith("/")) { return (CmsResource) readFolder(currentUser,currentProject,filename); } // read the resource from the currentProject, or the online-project try { // try to read form cache first cmsFile=(CmsResource)m_resourceCache.get(C_FILE+currentProject.getId()+filename); if (cmsFile==null) { cmsFile = m_dbAccess.readFileHeader(currentProject.getId(), filename); m_resourceCache.put(C_FILE+currentProject.getId()+filename,cmsFile); } } catch(CmsException exc) { // the resource was not readable if(currentProject.equals(onlineProject(currentUser, currentProject))) { // this IS the onlineproject - throw the exception throw exc; } else { // try to read the resource in the onlineproject cmsFile=(CmsResource)m_resourceCache.get(C_FILE+ C_PROJECT_ONLINE_ID+filename); if (cmsFile==null) { cmsFile = m_dbAccess.readFileHeader(C_PROJECT_ONLINE_ID,filename); m_resourceCache.put(C_FILE+C_PROJECT_ONLINE_ID+filename,cmsFile); } } } if( accessRead(currentUser, currentProject, cmsFile) ) { // acces to all 
subfolders was granted - return the file-header. return cmsFile; } else { throw new CmsException("[" + this.getClass().getName() + "] " + filename, CmsException.C_ACCESS_DENIED); } } /** * Reads all file headers for a project from the Cms.<BR/> * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param projectId The id of the project to read the resources for. * * @return a Vector of resources. * * @exception CmsException will be thrown, if the file couldn't be read. * The CmsException will also be thrown, if the user has not the rights * for this resource. */ public Vector readFileHeaders(CmsUser currentUser, CmsProject currentProject, int projectId) throws CmsException { CmsProject project = readProject(currentUser, currentProject, projectId); Vector resources = m_dbAccess.readResources(project); Vector retValue = new Vector(); // check the security for(int i = 0; i < resources.size(); i++) { if( accessRead(currentUser, currentProject, (CmsResource) resources.elementAt(i)) ) { retValue.addElement(resources.elementAt(i)); } } return retValue; } /** * Reads a folder from the Cms.<BR/> * * <B>Security:</B> * Access is granted, if: * <ul> * <li>the user has access to the project</li> * <li>the user can read the resource</li> * </ul> * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param project the project to read the folder from. * @param foldername The complete path of the folder to be read. * * @return folder The read folder. * * @exception CmsException will be thrown, if the folder couldn't be read. 
* The CmsException will also be thrown, if the user has not the rights * for this resource */ protected CmsFolder readFolder(CmsUser currentUser, CmsProject currentProject, int project, String folder) throws CmsException { if (folder == null) return null; CmsFolder cmsFolder = (CmsFolder) m_resourceCache.get(C_FOLDER + currentProject.getId() + folder); if (cmsFolder == null) { cmsFolder = m_dbAccess.readFolder(project, folder); if (cmsFolder != null) m_resourceCache.put(C_FOLDER + currentProject.getId() + folder, (CmsFolder) cmsFolder); } if (cmsFolder != null) { if (!accessRead(currentUser, currentProject, (CmsResource) cmsFolder)) throw new CmsException("[" + this.getClass().getName() + "] " + folder, CmsException.C_ACCESS_DENIED); } return cmsFolder; } /** * Reads a folder from the Cms.<BR/> * * <B>Security:</B> * Access is granted, if: * <ul> * <li>the user has access to the project</li> * <li>the user can read the resource</li> * </ul> * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param foldername The complete path of the folder to be read. * * @return folder The read folder. * * @exception CmsException will be thrown, if the folder couldn't be read. * The CmsException will also be thrown, if the user has not the rights * for this resource. 
*/ public CmsFolder readFolder(CmsUser currentUser, CmsProject currentProject, String folder) throws CmsException { CmsFolder cmsFolder; // read the resource from the currentProject, or the online-project try { cmsFolder = (CmsFolder) m_resourceCache.get(C_FOLDER + currentProject.getId() + folder); if (cmsFolder == null) { cmsFolder = m_dbAccess.readFolder(currentProject.getId(), folder); if (cmsFolder.getState() != C_STATE_DELETED) { m_resourceCache.put(C_FOLDER + currentProject.getId() + folder, (CmsFolder) cmsFolder); } } } catch (CmsException exc) { // the resource was not readable if (currentProject.equals(onlineProject(currentUser, currentProject))) { // this IS the onlineproject - throw the exception throw exc; } else { // try to read the resource in the onlineproject cmsFolder = (CmsFolder) m_resourceCache.get(C_FOLDER + C_PROJECT_ONLINE_ID + folder); if (cmsFolder == null) { cmsFolder = cmsFolder = m_dbAccess.readFolder(C_PROJECT_ONLINE_ID, folder); m_resourceCache.put(C_FOLDER + currentProject.getId() + folder, (CmsFolder) cmsFolder); } } } if (accessRead(currentUser, currentProject, (CmsResource) cmsFolder)) { // acces to all subfolders was granted - return the folder. if (cmsFolder.getState() == C_STATE_DELETED) { throw new CmsException("[" + this.getClass().getName() + "]" + cmsFolder.getAbsolutePath(), CmsException.C_RESOURCE_DELETED); } else { return cmsFolder; } } else { throw new CmsException("[" + this.getClass().getName() + "] " + folder, CmsException.C_ACCESS_DENIED); } } /** * Reads a folder from the Cms.<BR/> * * <B>Security:</B> * Access is granted, if: * <ul> * <li>the user has access to the project</li> * <li>the user can read the resource</li> * </ul> * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param folder The complete path to the folder from which the folder will be * read. * @param foldername The name of the folder to be read. * * @return folder The read folder. 
* * @exception CmsException will be thrown, if the folder couldn't be read. * The CmsException will also be thrown, if the user has not the rights * for this resource. * * @see #readFolder(CmsUser, CmsProject, String) */ public CmsFolder readFolder(CmsUser currentUser, CmsProject currentProject, String folder, String folderName) throws CmsException { return readFolder(currentUser, currentProject, folder + folderName); } /** * Reads all given tasks from a user for a project. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param projectId The id of the Project in which the tasks are defined. * @param owner Owner of the task. * @param tasktype Task type you want to read: C_TASKS_ALL, C_TASKS_OPEN, C_TASKS_DONE, C_TASKS_NEW. * @param orderBy Chooses, how to order the tasks. * * @exception CmsException Throws CmsException if something goes wrong. */ public Vector readGivenTasks(CmsUser currentUser, CmsProject currentProject, int projectId, String ownerName, int taskType, String orderBy, String sort) throws CmsException { CmsProject project = null; CmsUser owner = null; if(ownerName != null) { owner = readUser(currentUser, currentProject, ownerName); } if(projectId != C_UNKNOWN_ID) { project = readProject(currentUser, currentProject, projectId); } return m_dbAccess.readTasks(project,null, owner, null, taskType, orderBy, sort); } /** * Reads the group of a project from the OpenCms. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @return The group of a resource. * @exception CmsException Throws CmsException if operation was not succesful. 
*/ public CmsGroup readGroup(CmsUser currentUser, CmsProject currentProject, CmsProject project) throws CmsException { CmsGroup group=null; // try to read group form cache group=(CmsGroup)m_groupCache.get(project.getGroupId()); if (group== null) { group=m_dbAccess.readGroup(project.getGroupId()) ; m_groupCache.put(project.getGroupId(),group); } return group; } /** * Reads the group of a resource from the OpenCms. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @return The group of a resource. * * @exception CmsException Throws CmsException if operation was not succesful. */ public CmsGroup readGroup(CmsUser currentUser, CmsProject currentProject, CmsResource resource) throws CmsException { CmsGroup group=null; // try to read group form cache group=(CmsGroup)m_groupCache.get(resource.getGroupId()); if (group== null) { group=m_dbAccess.readGroup(resource.getGroupId()) ; m_groupCache.put(resource.getGroupId(),group); } return group; } /** * Reads the group (role) of a task from the OpenCms. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param task The task to read from. * @return The group of a resource. * * @exception CmsException Throws CmsException if operation was not succesful. */ public CmsGroup readGroup(CmsUser currentUser, CmsProject currentProject, CmsTask task) throws CmsException { // TODO: To be implemented //return null; return m_dbAccess.readGroup(task.getRole()); } /** * Returns a group object.<P/> * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param groupname The name of the group that is to be read. * @return Group. 
* * @exception CmsException Throws CmsException if operation was not succesful */ public CmsGroup readGroup(CmsUser currentUser, CmsProject currentProject, String groupname) throws CmsException { CmsGroup group=null; // try to read group form cache group=(CmsGroup)m_groupCache.get(groupname); if (group== null) { group=m_dbAccess.readGroup(groupname) ; m_groupCache.put(groupname,group); } return group; } /** * Reads the managergroup of a project from the OpenCms. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @return The group of a resource. * @exception CmsException Throws CmsException if operation was not succesful. */ public CmsGroup readManagerGroup(CmsUser currentUser, CmsProject currentProject, CmsProject project) throws CmsException { CmsGroup group=null; // try to read group form cache group=(CmsGroup)m_groupCache.get(project.getManagerGroupId()); if (group== null) { group=m_dbAccess.readGroup(project.getManagerGroupId()) ; m_groupCache.put(project.getManagerGroupId(),group); } return group; } /** * Gets the MimeTypes. * The Mime-Types will be returned. * * <B>Security:</B> * All users are garnted<BR/> * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * * @return the mime-types. */ public Hashtable readMimeTypes(CmsUser currentUser, CmsProject currentProject) throws CmsException { return(Hashtable) m_dbAccess.readSystemProperty(C_SYSTEMPROPERTY_MIMETYPES); } /** * Reads the original agent of a task from the OpenCms. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param task The task to read the original agent from. * @return The owner of a task. * * @exception CmsException Throws CmsException if operation was not succesful. 
*/ public CmsUser readOriginalAgent(CmsUser currentUser, CmsProject currentProject, CmsTask task) throws CmsException { return readUser(currentUser,currentProject,task.getOriginalUser()); } /** * Reads the owner of a project from the OpenCms. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @return The owner of a resource. * * @exception CmsException Throws CmsException if operation was not succesful. */ public CmsUser readOwner(CmsUser currentUser, CmsProject currentProject, CmsProject project) throws CmsException { return readUser(currentUser,currentProject,project.getOwnerId()); } /** * Reads the owner of a resource from the OpenCms. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @return The owner of a resource. * * @exception CmsException Throws CmsException if operation was not succesful. */ public CmsUser readOwner(CmsUser currentUser, CmsProject currentProject, CmsResource resource) throws CmsException { return readUser(currentUser,currentProject,resource.getOwnerId() ); } /** * Reads the owner (initiator) of a task from the OpenCms. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param task The task to read the owner from. * @return The owner of a task. * * @exception CmsException Throws CmsException if operation was not succesful. */ public CmsUser readOwner(CmsUser currentUser, CmsProject currentProject, CmsTask task) throws CmsException { return readUser(currentUser,currentProject,task.getInitiatorUser()); } /** * Reads the owner of a tasklog from the OpenCms. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. 
* @param currentProject The current project of the user. * @return The owner of a resource. * * @exception CmsException Throws CmsException if operation was not succesful. */ public CmsUser readOwner(CmsUser currentUser, CmsProject currentProject, CmsTaskLog log) throws CmsException { return readUser(currentUser,currentProject,log.getUser()); } /** * Reads a project from the Cms. * * <B>Security</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param id The id of the project to read. * * @exception CmsException Throws CmsException if something goes wrong. */ public CmsProject readProject(CmsUser currentUser, CmsProject currentProject, int id) throws CmsException { CmsProject project=null; project=(CmsProject)m_projectCache.get(id); if (project==null) { project=m_dbAccess.readProject(id); m_projectCache.put(id,project); } return project; } /** * Reads a project from the Cms. * * <B>Security</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param res The resource to read the project of. * * @exception CmsException Throws CmsException if something goes wrong. */ public CmsProject readProject(CmsUser currentUser, CmsProject currentProject, CmsResource res) throws CmsException { return readProject(currentUser, currentProject, res.getProjectId()); } /** * Reads a project from the Cms. * * <B>Security</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param task The task to read the project of. * * @exception CmsException Throws CmsException if something goes wrong. */ public CmsProject readProject(CmsUser currentUser, CmsProject currentProject, CmsTask task) throws CmsException { // read the parent of the task, until it has no parents. 
task = this.readTask(currentUser, currentProject, task.getId()); while(task.getParent() != 0) { task = readTask(currentUser, currentProject, task.getParent()); } return m_dbAccess.readProject(task); } /** * Reads log entries for a project. * * @param projectId The id of the projec for tasklog to read. * @return A Vector of new TaskLog objects * @exception CmsException Throws CmsException if something goes wrong. */ public Vector readProjectLogs(CmsUser currentUser, CmsProject currentProject, int projectid) throws CmsException { return m_dbAccess.readProjectLogs(projectid); } /** * Returns a propertyinformation of a file or folder. * * <B>Security</B> * Only the user is granted, who has the right to view the resource. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param resource The name of the resource of which the propertyinformation has * to be read. * @param property The propertydefinition-name of which the propertyinformation has to be read. * * @return propertyinfo The propertyinfo as string. * * @exception CmsException Throws CmsException if operation was not succesful */ public String readProperty(CmsUser currentUser, CmsProject currentProject, String resource, String property) throws CmsException { CmsResource res; // read the resource from the currentProject, or the online-project try { res = readFileHeader(currentUser,currentProject, resource); } catch(CmsException exc) { // the resource was not readable if(currentProject.equals(onlineProject(currentUser, currentProject))) { // this IS the onlineproject - throw the exception throw exc; } else { // try to read the resource in the onlineproject res = readFileHeader(currentUser,onlineProject(currentUser, currentProject), resource); } } // check the security if( ! 
accessRead(currentUser, currentProject, res) ) { throw new CmsException("[" + this.getClass().getName() + "] " + resource, CmsException.C_NO_ACCESS); } String returnValue = null; returnValue = (String)m_propertyCache.get(property + Integer.toString(res.getResourceId()) +","+ Integer.toString(res.getType())); if (returnValue == null){ returnValue = m_dbAccess.readProperty(property,res.getResourceId(),res.getType()); if (returnValue == null) { returnValue=""; } m_propertyCache.put(property +Integer.toString(res.getResourceId()) + ","+ Integer.toString(res.getType()), returnValue); } if (returnValue.equals("")){returnValue=null;} return returnValue; } /** * Reads a definition for the given resource type. * * <B>Security</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param name The name of the propertydefinition to read. * @param resourcetype The name of the resource type for which the propertydefinition * is valid. * * @return propertydefinition The propertydefinition that corresponds to the overgiven * arguments - or null if there is no valid propertydefinition. * * @exception CmsException Throws CmsException if something goes wrong. */ public CmsPropertydefinition readPropertydefinition(CmsUser currentUser, CmsProject currentProject, String name, String resourcetype) throws CmsException { CmsResourceType resType = getResourceType(currentUser,currentProject,resourcetype); CmsPropertydefinition returnValue = null; returnValue = (CmsPropertydefinition)m_propertyDefCache.get(name + resType.getResourceType()); if (returnValue == null){ returnValue = m_dbAccess.readPropertydefinition(name, resType); m_propertyDefCache.put(name + resType.getResourceType(), returnValue); } return returnValue; } public Vector readResources(CmsProject project) throws com.opencms.core.CmsException { return m_dbAccess.readResources(project); } /** * Read a task by id. 
* * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param id The id for the task to read. * * @exception CmsException Throws CmsException if something goes wrong. */ public CmsTask readTask(CmsUser currentUser, CmsProject currentProject, int id) throws CmsException { return m_dbAccess.readTask(id); } /** * Reads log entries for a task. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param taskid The task for the tasklog to read . * @return A Vector of new TaskLog objects * @exception CmsException Throws CmsException if something goes wrong. */ public Vector readTaskLogs(CmsUser currentUser, CmsProject currentProject, int taskid) throws CmsException { return m_dbAccess.readTaskLogs(taskid);; } /** * Reads all tasks for a project. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param projectId The id of the Project in which the tasks are defined. Can be null for all tasks * @tasktype Task type you want to read: C_TASKS_ALL, C_TASKS_OPEN, C_TASKS_DONE, C_TASKS_NEW * @param orderBy Chooses, how to order the tasks. * @param sort Sort order C_SORT_ASC, C_SORT_DESC, or null * * @exception CmsException Throws CmsException if something goes wrong. */ public Vector readTasksForProject(CmsUser currentUser, CmsProject currentProject, int projectId, int tasktype, String orderBy, String sort) throws CmsException { CmsProject project = null; if(projectId != C_UNKNOWN_ID) { project = readProject(currentUser, currentProject, projectId); } return m_dbAccess.readTasks(project, null, null, null, tasktype, orderBy, sort); } /** * Reads all tasks for a role in a project. * * <B>Security:</B> * All users are granted. 
* * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param projectId The id of the Project in which the tasks are defined. * @param user The user who has to process the task. * @param tasktype Task type you want to read: C_TASKS_ALL, C_TASKS_OPEN, C_TASKS_DONE, C_TASKS_NEW. * @param orderBy Chooses, how to order the tasks. * @param sort Sort order C_SORT_ASC, C_SORT_DESC, or null * @exception CmsException Throws CmsException if something goes wrong. */ public Vector readTasksForRole(CmsUser currentUser, CmsProject currentProject, int projectId, String roleName, int tasktype, String orderBy, String sort) throws CmsException { CmsProject project = null; CmsGroup role = null; if(roleName != null) { role = readGroup(currentUser, currentProject, roleName); } if(projectId != C_UNKNOWN_ID) { project = readProject(currentUser, currentProject, projectId); } return m_dbAccess.readTasks(project, null, null, role, tasktype, orderBy, sort); } /** * Reads all tasks for a user in a project. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param projectId The id of the Project in which the tasks are defined. * @param userName The user who has to process the task. * @param taskType Task type you want to read: C_TASKS_ALL, C_TASKS_OPEN, C_TASKS_DONE, C_TASKS_NEW. * @param orderBy Chooses, how to order the tasks. * @param sort Sort order C_SORT_ASC, C_SORT_DESC, or null * @exception CmsException Throws CmsException if something goes wrong. 
*/ public Vector readTasksForUser(CmsUser currentUser, CmsProject currentProject, int projectId, String userName, int taskType, String orderBy, String sort) throws CmsException { CmsUser user = m_dbAccess.readUser(userName, C_USER_TYPE_SYSTEMUSER); return m_dbAccess.readTasks(currentProject, user, null, null, taskType, orderBy, sort); } /** * Returns a user object.<P/> * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param id The id of the user that is to be read. * @return User * @exception CmsException Throws CmsException if operation was not succesful */ public CmsUser readUser(CmsUser currentUser, CmsProject currentProject, int id) throws CmsException { try { CmsUser user=null; // try to read the user from cache user=(CmsUser)m_userCache.get(id); if (user==null) { user=m_dbAccess.readUser(id); m_userCache.put(id,user); } return user; } catch (CmsException ex) { return new CmsUser(C_UNKNOWN_ID, id + "", "deleted user"); } } /** * Returns a user object.<P/> * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param username The name of the user that is to be read. * @return User * @exception CmsException Throws CmsException if operation was not succesful */ public CmsUser readUser(CmsUser currentUser, CmsProject currentProject, String username) throws CmsException { CmsUser user = null; // try to read the user from cache user = (CmsUser)m_userCache.get(username+C_USER_TYPE_SYSTEMUSER); if (user == null) { user = m_dbAccess.readUser(username, C_USER_TYPE_SYSTEMUSER); m_userCache.put(username+C_USER_TYPE_SYSTEMUSER,user); } return user; } /** * Returns a user object.<P/> * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. 
* @param username The name of the user that is to be read. * @param type The type of the user. * @return User * @exception CmsException Throws CmsException if operation was not succesful */ public CmsUser readUser(CmsUser currentUser, CmsProject currentProject, String username,int type) throws CmsException { CmsUser user = null; // try to read the user from cache user = (CmsUser)m_userCache.get(username+type); if (user == null) { user = m_dbAccess.readUser(username, type); m_userCache.put(username+type,user); } return user; } /** * Returns a user object if the password for the user is correct.<P/> * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param username The username of the user that is to be read. * @param password The password of the user that is to be read. * @return User * * @exception CmsException Throws CmsException if operation was not succesful */ public CmsUser readUser(CmsUser currentUser, CmsProject currentProject, String username, String password) throws CmsException { CmsUser user = null; user = (CmsUser)m_userCache.get(username+C_USER_TYPE_SYSTEMUSER); // store user in cache if (user == null) { user = m_dbAccess.readUser(username, password, C_USER_TYPE_SYSTEMUSER); m_userCache.put(username+C_USER_TYPE_SYSTEMUSER, user); } return user; } /** * Returns a user object if the password for the user is correct.<P/> * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param username The username of the user that is to be read. 
* @return User * * @exception CmsException Throws CmsException if operation was not succesful */ public CmsUser readWebUser(CmsUser currentUser, CmsProject currentProject, String username) throws CmsException { CmsUser user = null; user = (CmsUser)m_userCache.get(username+C_USER_TYPE_WEBUSER); // store user in cache if (user == null) { user = m_dbAccess.readUser(username, C_USER_TYPE_WEBUSER); m_userCache.put(username+C_USER_TYPE_WEBUSER, user); } return user; } /** * Returns a user object if the password for the user is correct.<P/> * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param username The username of the user that is to be read. * @param password The password of the user that is to be read. * @return User * * @exception CmsException Throws CmsException if operation was not succesful */ public CmsUser readWebUser(CmsUser currentUser, CmsProject currentProject, String username, String password) throws CmsException { CmsUser user = null; user = (CmsUser)m_userCache.get(username+C_USER_TYPE_WEBUSER); // store user in cache if (user == null) { user = m_dbAccess.readUser(username, password, C_USER_TYPE_WEBUSER); m_userCache.put(username+C_USER_TYPE_WEBUSER,user); } return user; } /** * Reaktivates a task from the Cms. * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param taskid The Id of the task to accept. * * @exception CmsException Throws CmsException if something goes wrong. 
     */
    public void reaktivateTask(CmsUser currentUser, CmsProject currentProject,
            int taskId) throws CmsException {
        // reset the task to "started" with zero progress and log the change
        CmsTask task = m_dbAccess.readTask(taskId);
        task.setState(C_TASK_STATE_STARTED);
        task.setPercentage(0);
        task = m_dbAccess.writeTask(task);
        m_dbAccess.writeSystemTaskLog(taskId,
            "Task was reactivated from " +
            currentUser.getFirstname() + " " +
            currentUser.getLastname() + ".");
    }

    /**
     * Sets a new password only if the user knows his recovery-password.
     *
     * All users can do this if he knows the recovery-password.<P/>
     *
     * <B>Security:</B>
     * All users can do this if he knows the recovery-password.<P/>
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param username The name of the user.
     * @param recoveryPassword The recovery password.
     * @param newPassword The new password.
     *
     * @exception CmsException Throws CmsException if operation was not succesfull.
     */
    public void recoverPassword(CmsUser currentUser, CmsProject currentProject,
            String username, String recoveryPassword, String newPassword)
            throws CmsException {
        // check the length of the new password.
        if (newPassword.length() < C_PASSWORD_MINIMUMSIZE) {
            throw new CmsException("[" + this.getClass().getName() + "] " + username,
                CmsException.C_SHORT_PASSWORD);
        }
        // check the length of the recovery password.
        if (recoveryPassword.length() < C_PASSWORD_MINIMUMSIZE) {
            throw new CmsException("[" + this.getClass().getName() +
                "] no recovery password.");
        }
        // the actual recovery-password check happens in the db access layer
        m_dbAccess.recoverPassword(username, recoveryPassword, newPassword);
    }

    /**
     * Removes a user from a group.
     *
     * Only the admin can do this.<P/>
     *
     * <B>Security:</B>
     * Only users, which are in the group "administrators" are granted.
     *
     * @param currentUser The user who requested this method.
     * @param currentProject The current project of the user.
     * @param username The name of the user that is to be removed from the group.
     * @param groupname The name of the group.
     * @exception CmsException Throws CmsException if operation was not succesful.
     */
    public void removeUserFromGroup(CmsUser currentUser, CmsProject currentProject,
            String username, String groupname) throws CmsException {
        // test if this user is existing in the group
        if (!userInGroup(currentUser, currentProject, username, groupname)) {
            // the user is NOT a member of the group - nothing to remove
            // (the original comment here wrongly said "user already there")
            throw new CmsException("[" + this.getClass().getName() + "] remove " +
                username + " from " + groupname,
                CmsException.C_NO_USER);
        }
        if (isAdmin(currentUser, currentProject)) {
            CmsUser user;
            CmsGroup group;
            user = readUser(currentUser, currentProject, username);
            //check if the user exists
            if (user != null) {
                group = readGroup(currentUser, currentProject, groupname);
                //check if group exists
                if (group != null) {
                    // do not remove the user from its default group
                    if (user.getDefaultGroupId() != group.getId()) {
                        //remove this user from the group
                        m_dbAccess.removeUserFromGroup(user.getId(), group.getId());
                        // membership changed - the cached user/group relations are stale
                        m_usergroupsCache.clear();
                    } else {
                        throw new CmsException("[" + this.getClass().getName() + "]",
                            CmsException.C_NO_DEFAULT_GROUP);
                    }
                } else {
                    throw new CmsException("[" + this.getClass().getName() + "]" + groupname,
                        CmsException.C_NO_GROUP);
                }
            } else {
                // NOTE(review): this branch reports C_NO_ACCESS although the
                // condition is "user not found" - confirm the intended code.
                throw new CmsException("[" + this.getClass().getName() + "] " + username,
                    CmsException.C_NO_ACCESS);
            }
        }
    }

    /**
     * Renames the file to a new name. <br>
     *
     * Rename can only be done in an offline project. To rename a file, the following
     * steps have to be done:
     * <ul>
     * <li> Copy the file with the oldname to a file with the new name, the state
     * of the new file is set to NEW (2).
     * <ul>
     * <li> If the state of the original file is UNCHANGED (0), the file content of the
     * file is read from the online project. </li>
     * <li> If the state of the original file is CHANGED (1) or NEW (2) the file content
     * of the file is read from the offline project. </li>
     * </ul>
     * </li>
     * <li> Set the state of the old file to DELETED (3).
</li> * </ul> * * <B>Security:</B> * Access is granted, if: * <ul> * <li>the user has access to the project</li> * <li>the user can write the resource</li> * <li>the resource is locked by the callingUser</li> * </ul> * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param oldname The complete path to the resource which will be renamed. * @param newname The new name of the resource (CmsUser callingUser, No path information allowed). * * @exception CmsException Throws CmsException if operation was not succesful. */ public void renameFile(CmsUser currentUser, CmsProject currentProject, String oldname, String newname) throws CmsException { // read the old file CmsResource file = readFileHeader(currentUser, currentProject, oldname); // checks, if the newname is valid, if not it throws a exception validFilename(newname); // has the user write-access? if (accessWrite(currentUser, currentProject, file)) { String path = oldname.substring(0, oldname.lastIndexOf("/") + 1); copyFile(currentUser, currentProject, oldname, path + newname); deleteFile(currentUser, currentProject, oldname); } else { throw new CmsException("[" + this.getClass().getName() + "] " + oldname, CmsException.C_NO_ACCESS); } /* // check, if the new name is a valid filename validFilename(newname); // read the old file CmsResource file = readFileHeader(currentUser, currentProject, oldname); // has the user write-access? if( accessWrite(currentUser, currentProject, file) ) { // write-acces was granted - rename the file. 
m_dbAccess.renameFile(currentProject, onlineProject(currentUser, currentProject), currentUser.getId(), file.getResourceId(), file.getPath() + newname ); // copy the metainfos writeProperties(currentUser,currentProject, file.getPath() + newname, readAllProperties(currentUser,currentProject,file.getAbsolutePath())); // inform about the file-system-change fileSystemChanged(); } else { throw new CmsException("[" + this.getClass().getName() + "] " + oldname, CmsException.C_NO_ACCESS); } */ } /** * This method loads old sessiondata from the database. It is used * for sessionfailover. * * @param oldSessionId the id of the old session. * @return the old sessiondata. */ public Hashtable restoreSession(String oldSessionId) throws CmsException { return m_dbAccess.readSession(oldSessionId); } /** * Set a new name for a task * * <B>Security:</B> * All users are granted. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param taskid The Id of the task to set the percentage. * @param name The new name value * * @exception CmsException Throws CmsException if something goes wrong. */ public void setName(CmsUser currentUser, CmsProject currentProject, int taskId, String name) throws CmsException { if( (name == null) || name.length() == 0) { throw new CmsException("[" + this.getClass().getName() + "] " + name, CmsException.C_BAD_NAME); } CmsTask task = m_dbAccess.readTask(taskId); task.setName(name); task = m_dbAccess.writeTask(task); m_dbAccess.writeSystemTaskLog(taskId, "Name was set to " + name + "% from " + currentUser.getFirstname() + " " + currentUser.getLastname() + "."); } /** * Sets a new parent-group for an already existing group in the Cms.<BR/> * * Only the admin can do this.<P/> * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param groupName The name of the group that should be written to the Cms. 
* @param parentGroupName The name of the parentGroup to set, or null if the parent
 * group should be deleted.
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public void setParentGroup(CmsUser currentUser, CmsProject currentProject,
                           String groupName, String parentGroupName)
    throws CmsException {
    // Check the security
    if( isAdmin(currentUser, currentProject) ) {
        CmsGroup group = readGroup(currentUser, currentProject, groupName);
        int parentGroupId = C_UNKNOWN_ID;
        // if the parent group exists, use its id, else set to unknown (= detach).
        if( parentGroupName != null ) {
            parentGroupId = readGroup(currentUser, currentProject, parentGroupName).getId();
        }
        group.setParentId(parentGroupId);
        // write the changes to the cms
        writeGroup(currentUser,currentProject,group);
    } else {
        throw new CmsException("[" + this.getClass().getName() + "] " + groupName,
            CmsException.C_NO_ACCESS);
    }
}

/**
 * Sets the password for a user.
 *
 * Only an administrator can do this.<P/>
 *
 * <B>Security:</B>
 * Users, which are in the group "administrators" are granted.<BR/>
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param username The name of the user.
 * @param newPassword The new password.
 *
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public void setPassword(CmsUser currentUser, CmsProject currentProject,
                        String username, String newPassword)
    throws CmsException {
    // reject passwords shorter than the configured minimum length.
    if(newPassword.length() < C_PASSWORD_MINIMUMSIZE) {
        throw new CmsException("[" + this.getClass().getName() + "] " + username,
            CmsException.C_SHORT_PASSWORD);
    }

    if( isAdmin(currentUser, currentProject) ) {
        m_dbAccess.setPassword(username, newPassword);
    } else {
        throw new CmsException("[" + this.getClass().getName() + "] " + username,
            CmsException.C_NO_ACCESS);
    }
}

/**
 * Sets the password for a user.
*
 * Only an administrator or the current user can do this.<P/>
 *
 * <B>Security:</B>
 * Users, which are in the group "administrators" are granted.<BR/>
 * Current users can change their own password.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param username The name of the user.
 * @param oldPassword The old password.
 * @param newPassword The new password.
 *
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public void setPassword(CmsUser currentUser, CmsProject currentProject,
                        String username, String oldPassword, String newPassword)
    throws CmsException {
    // reject passwords shorter than the configured minimum length.
    if(newPassword.length() < C_PASSWORD_MINIMUMSIZE) {
        throw new CmsException("[" + this.getClass().getName() + "] " + username,
            CmsException.C_SHORT_PASSWORD);
    }

    // read the user - this also authenticates against the old password
    CmsUser user;
    try {
        user = readUser(currentUser, currentProject, username, oldPassword);
    } catch(CmsException exc) {
        // this is no system-user - maybe a webuser?
        user = readWebUser(currentUser, currentProject, username, oldPassword);
    }
    // NOTE(review): the isAdmin check is performed on the *target* user, not on
    // currentUser - confirm that this is the intended authorization rule.
    if( ! anonymousUser(currentUser, currentProject).equals( currentUser ) &&
        ( isAdmin(user, currentProject) || user.equals(currentUser)) ) {
        m_dbAccess.setPassword(username, newPassword);
    } else {
        throw new CmsException("[" + this.getClass().getName() + "] " + username,
            CmsException.C_NO_ACCESS);
    }
}

/**
 * Set priority of a task.
 *
 * <B>Security:</B>
 * All users are granted.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param taskId The Id of the task whose priority is set.
 * @param priority The new priority value.
 *
 * @exception CmsException Throws CmsException if something goes wrong.
 */
public void setPriority(CmsUser currentUser, CmsProject currentProject,
                        int taskId, int priority)
    throws CmsException {
    CmsTask task = m_dbAccess.readTask(taskId);
    task.setPriority(priority);
    // writeTask returns the persisted task instance
    task = m_dbAccess.writeTask(task);
    m_dbAccess.writeSystemTaskLog(taskId,
                                  "Priority was set to " + priority + " from " +
                                  currentUser.getFirstname() + " " +
                                  currentUser.getLastname() + ".");
}

/**
 * Sets the recovery password for a user.
 *
 * Only an administrator or the current user can do this.<P/>
 *
 * <B>Security:</B>
 * Users, which are in the group "administrators" are granted.<BR/>
 * Current users can change their own password.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param username The name of the user.
 * @param password The password of the user.
 * @param newPassword The new recoveryPassword to be set.
 *
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public void setRecoveryPassword(CmsUser currentUser, CmsProject currentProject,
                                String username, String password, String newPassword)
    throws CmsException {
    // reject passwords shorter than the configured minimum length.
    if(newPassword.length() < C_PASSWORD_MINIMUMSIZE) {
        throw new CmsException("[" + this.getClass().getName() + "] " + username,
            CmsException.C_SHORT_PASSWORD);
    }

    // read the user - this also authenticates against the current password
    CmsUser user;
    try {
        user = readUser(currentUser, currentProject, username, password);
    } catch(CmsException exc) {
        // this is no system-user - maybe a webuser?
        user = readWebUser(currentUser, currentProject, username, password);
    }
    // NOTE(review): as in setPassword, the isAdmin check is performed on the
    // *target* user, not on currentUser - confirm this is intended.
    if( ! anonymousUser(currentUser, currentProject).equals( currentUser ) &&
        ( isAdmin(user, currentProject) || user.equals(currentUser)) ) {
        m_dbAccess.setRecoveryPassword(username, newPassword);
    } else {
        throw new CmsException("[" + this.getClass().getName() + "] " + username,
            CmsException.C_NO_ACCESS);
    }
}

/**
 * Set a Parameter for a task.
 *
 * <B>Security:</B>
 * All users are granted.
*
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param taskId The Id of the task.
 * @param parName Name of the parameter.
 * @param parValue Value of the parameter.
 *
 * @exception CmsException Throws CmsException if something goes wrong.
 */
public void setTaskPar(CmsUser currentUser, CmsProject currentProject,
                       int taskId, String parName, String parValue)
    throws CmsException {
    m_dbAccess.setTaskPar(taskId, parName, parValue);
}

/**
 * Set timeout of a task.
 *
 * <B>Security:</B>
 * All users are granted.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param taskId The Id of the task whose timeout is set.
 * @param timeout The new timeout value (converted to a java.sql.Timestamp).
 *
 * @exception CmsException Throws CmsException if something goes wrong.
 */
public void setTimeout(CmsUser currentUser, CmsProject currentProject,
                       int taskId, long timeout)
    throws CmsException {
    CmsTask task = m_dbAccess.readTask(taskId);
    java.sql.Timestamp timestamp = new java.sql.Timestamp(timeout);
    task.setTimeOut(timestamp);
    // writeTask returns the persisted task instance
    task = m_dbAccess.writeTask(task);
    m_dbAccess.writeSystemTaskLog(taskId,
                                  "Timeout was set to " + timeout + " from " +
                                  currentUser.getFirstname() + " " +
                                  currentUser.getLastname() + ".");
}

/**
 * This method stores sessiondata into the database. It is used
 * for sessionfailover.
 *
 * @param sessionId the id of the session.
 * @param sessionData the session data to store.
 * @exception CmsException if accessing the database fails.
 */
public void storeSession(String sessionId, Hashtable sessionData)
    throws CmsException {
    // try to update an existing session entry first
    int rowCount = m_dbAccess.updateSession(sessionId, sessionData);
    if(rowCount != 1) {
        // the entry doesn't exist yet - create it
        m_dbAccess.createSession(sessionId, sessionData);
    }
}

/**
 * Unlocks all resources in this project.
*
 * <B>Security</B>
 * Only the admin or the owner of the project can do this.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param id The id of the project to be unlocked.
 *
 * @exception CmsException Throws CmsException if something goes wrong.
 */
public void unlockProject(CmsUser currentUser, CmsProject currentProject, int id)
    throws CmsException {
    // read the project.
    CmsProject project = readProject(currentUser, currentProject, id);
    // check the security
    // NOTE(review): this requires the project flags to equal
    // C_PROJECT_STATE_UNLOCKED *before* unlocking - confirm this is intended
    // (one would expect a locked state to be required here).
    if( (isAdmin(currentUser, currentProject) ||
        isManagerOfProject(currentUser, project) ) &&
        (project.getFlags() == C_PROJECT_STATE_UNLOCKED )) {
        // unlock all resources in the project
        m_dbAccess.unlockProject(project);
        m_resourceCache.clear();
        m_projectCache.clear();
    } else {
        throw new CmsException("[" + this.getClass().getName() + "] " + id,
            CmsException.C_NO_ACCESS);
    }
}

/**
 * Unlocks a resource.<br>
 *
 * Only a resource in an offline project can be unlocked. The state of the resource
 * is set to CHANGED (1).
 * If the content of this resource is not existing in the offline project already,
 * it is read from the online project and written into the offline project.
 * Only the user who locked a resource can unlock it.
 *
 * <B>Security:</B>
 * Access is granted, if:
 * <ul>
 * <li>the user had locked the resource before</li>
 * </ul>
 *
 * @param currentUser The user who wants to unlock the resource.
 * @param currentProject The project in which the resource will be used.
 * @param resourcename The complete path to the resource to unlock.
 *
 * @exception CmsException Throws CmsException if operation was not successful.
*/ public void unlockResource(CmsUser currentUser,CmsProject currentProject, String resourcename) throws CmsException { CmsResource cmsResource=null; // read the resource, that shold be locked if (resourcename.endsWith("/")) { cmsResource = readFolder(currentUser,currentProject,resourcename); } else { cmsResource = (CmsFile)readFileHeader(currentUser,currentProject,resourcename); } // check, if the user may lock the resource if( accessUnlock(currentUser, currentProject, cmsResource) ) { // unlock the resource. if (cmsResource.isLocked()){ // check if the resource is locked by the actual user if (cmsResource.isLockedBy()==currentUser.getId()) { // unlock the resource cmsResource.setLocked(C_UNKNOWN_ID); //update resource m_dbAccess.updateLockstate(cmsResource); if (resourcename.endsWith("/")) { //m_dbAccess.writeFolder(currentProject,(CmsFolder)cmsResource,false); // update the cache m_resourceCache.put(C_FOLDER+currentProject.getId()+resourcename,(CmsFolder)cmsResource); } else { //m_dbAccess.writeFileHeader(currentProject,onlineProject(currentUser, currentProject),(CmsFile)cmsResource,false); // update the cache m_resourceCache.put(C_FILE+currentProject.getId()+resourcename,(CmsFile)cmsResource); } m_subresCache.clear(); } else { throw new CmsException("[" + this.getClass().getName() + "] " + resourcename + CmsException.C_NO_ACCESS); } } // if this resource is a folder -> lock all subresources, too if(cmsResource.isFolder()) { Vector files = getFilesInFolder(currentUser,currentProject, cmsResource.getAbsolutePath()); Vector folders = getSubFolders(currentUser,currentProject, cmsResource.getAbsolutePath()); CmsResource currentResource; // lock all files in this folder for(int i = 0; i < files.size(); i++ ) { currentResource = (CmsResource)files.elementAt(i); if (currentResource.getState() != C_STATE_DELETED) { unlockResource(currentUser, currentProject, currentResource.getAbsolutePath()); } } // lock all files in this folder for(int i = 0; i < folders.size(); i++) 
{ currentResource = (CmsResource)folders.elementAt(i); if (currentResource.getState() != C_STATE_DELETED) { unlockResource(currentUser, currentProject, currentResource.getAbsolutePath()); } } } } else { throw new CmsException("[" + this.getClass().getName() + "] " + resourcename, CmsException.C_NO_ACCESS); } } /** * Checks if a user is member of a group.<P/> * * <B>Security:</B> * All users are granted, except the anonymous user. * * @param currentUser The user who requested this method. * @param currentProject The current project of the user. * @param callingUser The user who wants to use this method. * @param nameuser The name of the user to check. * @param groupname The name of the group to check. * @return True or False * * @exception CmsException Throws CmsException if operation was not succesful */ public boolean userInGroup(CmsUser currentUser, CmsProject currentProject, String username, String groupname) throws CmsException { Vector groups = getGroupsOfUser(currentUser,currentProject,username); CmsGroup group; for(int z = 0; z < groups.size(); z++) { group = (CmsGroup) groups.elementAt(z); if(groupname.equals(group.getName())) { return true; } } return false; } /** * Checks ii characters in a String are allowed for filenames * * @param filename String to check * * @exception throws a exception, if the check fails. 
 */
protected void validFilename( String filename )
    throws CmsException {
    // null is never a valid filename
    if (filename == null) {
        throw new CmsException("[" + this.getClass().getName() + "] " + filename,
            CmsException.C_BAD_NAME);
    }

    int l = filename.length();

    // empty names and names starting with a dot are rejected
    if (l == 0 || filename.startsWith(".")) {
        throw new CmsException("[" + this.getClass().getName() + "] " + filename,
            CmsException.C_BAD_NAME);
    }

    // only letters, digits and the characters - . | _ ~ are allowed
    for (int i=0; i<l; i++) {
        char c = filename.charAt(i);
        if (
            ((c < 'a') || (c > 'z')) &&
            ((c < '0') || (c > '9')) &&
            ((c < 'A') || (c > 'Z')) &&
            (c != '-') && (c != '.') &&
            (c != '|') && (c != '_') &&
            (c != '~')
            ) {
            throw new CmsException("[" + this.getClass().getName() + "] " + filename,
                CmsException.C_BAD_NAME);
        }
    }
}

/**
 * Checks if characters in a String are allowed for names.
 *
 * @param name String to check
 * @param blank true if blanks (spaces) are allowed in the name
 *
 * @exception CmsException if the check fails.
 */
protected void validName(String name, boolean blank)
    throws CmsException {
    // null, empty and whitespace-only names are rejected
    if (name == null || name.length() == 0 || name.trim().length() == 0) {
        throw new CmsException("[" + this.getClass().getName() + "] " + name,
            CmsException.C_BAD_NAME);
    }
    // throw exception if no blanks are allowed
    if (!blank) {
        int l = name.length();
        for (int i = 0; i < l; i++) {
            char c = name.charAt(i);
            if (c == ' ') {
                throw new CmsException("[" + this.getClass().getName() + "] " + name,
                    CmsException.C_BAD_NAME);
            }
        }
    }
}

/**
 * Writes the export-path for the system.
 * This path is used for db-export and db-import.
 *
 * <B>Security:</B>
 * Users, which are in the group "administrators" are granted.<BR/>
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
* @param path The export path to write.
 */
public void writeExportPath(CmsUser currentUser, CmsProject currentProject, String path)
    throws CmsException {
    // check the security
    if( isAdmin(currentUser, currentProject) ) {
        // security is ok - write the exportpath.
        if(m_dbAccess.readSystemProperty(C_SYSTEMPROPERTY_EXPORTPATH) == null) {
            // the property wasn't set before.
            m_dbAccess.addSystemProperty(C_SYSTEMPROPERTY_EXPORTPATH, path);
        } else {
            // overwrite the property.
            m_dbAccess.writeSystemProperty(C_SYSTEMPROPERTY_EXPORTPATH, path);
        }
    } else {
        throw new CmsException("[" + this.getClass().getName() + "] " + path,
            CmsException.C_NO_ACCESS);
    }
}

/**
 * Writes a file to the Cms.<br>
 *
 * A file can only be written to an offline project.<br>
 * The state of the resource is set to CHANGED (1). The file content of the file
 * is either updated (if it is already existing in the offline project), or created
 * in the offline project (if it is not available there).<br>
 *
 * <B>Security:</B>
 * Access is granted, if:
 * <ul>
 * <li>the user has access to the project</li>
 * <li>the user can write the resource</li>
 * <li>the resource is locked by the callingUser</li>
 * </ul>
 *
 * @param currentUser The user who own this file.
 * @param currentProject The project in which the resource will be used.
 * @param file The name of the file to write.
 *
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public void writeFile(CmsUser currentUser, CmsProject currentProject, CmsFile file)
    throws CmsException {
    // has the user write-access?
    if( accessWrite(currentUser, currentProject, (CmsResource)file) ) {
        // write-access was granted - write the file.
        m_dbAccess.writeFile(currentProject,
                             onlineProject(currentUser, currentProject), file,true );
        if (file.getState()==C_STATE_UNCHANGED) {
            file.setState(C_STATE_CHANGED);
        }
        // refresh the cached copy and drop derived caches
        m_resourceCache.put(C_FILE+currentProject.getId()+file.getAbsolutePath(),file);
        m_subresCache.clear();
        m_accessCache.clear();
        // inform about the file-system-change
        fileSystemChanged(false);
    } else {
        throw new CmsException("[" + this.getClass().getName() + "] " + file.getAbsolutePath(),
            CmsException.C_NO_ACCESS);
    }
}

/**
 * Writes the file extensions.
 *
 * <B>Security:</B>
 * Users, which are in the group "Administrators" are authorized.<BR/>
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param extensions Holds extensions as keys and resourcetypes (Strings) as values
 */
public void writeFileExtensions(CmsUser currentUser, CmsProject currentProject,
                                Hashtable extensions)
    throws CmsException {
    if (extensions != null) {
        if (isAdmin(currentUser, currentProject)) {
            if (m_dbAccess.readSystemProperty(C_SYSTEMPROPERTY_EXTENSIONS) == null) {
                // the property wasn't set before.
                m_dbAccess.addSystemProperty(C_SYSTEMPROPERTY_EXTENSIONS, extensions);
            } else {
                // overwrite the property.
                m_dbAccess.writeSystemProperty(C_SYSTEMPROPERTY_EXTENSIONS, extensions);
            }
        } else {
            throw new CmsException("[" + this.getClass().getName() + "] " + extensions.size(),
                CmsException.C_NO_ACCESS);
        }
    }
}

/**
 * Writes a fileheader to the Cms.<br>
 *
 * A file can only be written to an offline project.<br>
 * The state of the resource is set to CHANGED (1).
The file content of the file
 * is either updated (if it is already existing in the offline project), or created
 * in the offline project (if it is not available there).<br>
 *
 * <B>Security:</B>
 * Access is granted, if:
 * <ul>
 * <li>the user has access to the project</li>
 * <li>the user can write the resource</li>
 * <li>the resource is locked by the callingUser</li>
 * </ul>
 *
 * @param currentUser The user who own this file.
 * @param currentProject The project in which the resource will be used.
 * @param file The file to write.
 *
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public void writeFileHeader(CmsUser currentUser, CmsProject currentProject, CmsFile file)
    throws CmsException {
    // has the user write-access?
    if( accessWrite(currentUser, currentProject, (CmsResource)file) ) {
        // write-access was granted - write the file header (not the content).
        m_dbAccess.writeFileHeader(currentProject, file,true );
        if (file.getState()==C_STATE_UNCHANGED) {
            file.setState(C_STATE_CHANGED);
        }
        // refresh the cached copy and drop derived caches
        m_resourceCache.put(C_FILE+currentProject.getId()+file.getAbsolutePath(),file);
        m_subresCache.clear();
        m_accessCache.clear();
        // inform about the file-system-change
        fileSystemChanged(false);
    } else {
        throw new CmsException("[" + this.getClass().getName() + "] " + file.getAbsolutePath(),
            CmsException.C_NO_ACCESS);
    }
}

/**
 * Writes an already existing group in the Cms.<BR/>
 *
 * Only the admin can do this.<P/>
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param group The group that should be written to the Cms.
 * @exception CmsException Throws CmsException if operation was not successful.
 */
public void writeGroup(CmsUser currentUser, CmsProject currentProject,
                       CmsGroup group)
    throws CmsException {
    // Check the security
    if( isAdmin(currentUser, currentProject) ) {
        m_dbAccess.writeGroup(group);
        // refresh the cached copy of the group
        m_groupCache.put(group.getName(),group);
    } else {
        throw new CmsException("[" + this.getClass().getName() + "] " + group.getName(),
            CmsException.C_NO_ACCESS);
    }
}

/**
 * Writes a couple of propertyinformation for a file or folder.
 *
 * <B>Security</B>
 * Only the user is granted, who has the right to write the resource.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param resource The name of the resource of which the propertyinformation
 * has to be written.
 * @param propertyinfos A Hashtable with propertydefinition- propertyinfo-pairs as strings.
 *
 * @exception CmsException Throws CmsException if operation was not successful
 */
public void writeProperties(CmsUser currentUser, CmsProject currentProject,
                            String resource, Hashtable propertyinfos)
    throws CmsException {
    // read the resource
    CmsResource res = readFileHeader(currentUser,currentProject, resource);
    // check the security
    if( ! accessWrite(currentUser, currentProject, res) ) {
        throw new CmsException("[" + this.getClass().getName() + "] " + resource,
            CmsException.C_NO_ACCESS);
    }

    m_dbAccess.writeProperties(propertyinfos,res.getResourceId(),res.getType());
    m_propertyCache.clear();
    if (res.getState()==C_STATE_UNCHANGED) {
        res.setState(C_STATE_CHANGED);
    }
    if(res.isFile()){
        m_dbAccess.writeFileHeader(currentProject, (CmsFile) res, false);
        // refresh the cached copy of the file
        m_resourceCache.put(C_FILE+currentProject.getId()+resource,res);
    } else {
        m_dbAccess.writeFolder(currentProject,
            readFolder(currentUser,currentProject, resource), false);
        // refresh the cached copy of the folder
        m_resourceCache.put(C_FOLDER+currentProject.getId()+resource,(CmsFolder)res);
    }
    m_subresCache.clear();
}

/**
 * Writes a propertyinformation for a file or folder.
*
 * <B>Security</B>
 * Only the user is granted, who has the right to write the resource.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param resource The name of the resource of which the propertyinformation has
 * to be written.
 * @param property The propertydefinition-name of which the propertyinformation has to be set.
 * @param value The value for the propertyinfo to be set.
 *
 * @exception CmsException Throws CmsException if operation was not successful
 */
public void writeProperty(CmsUser currentUser, CmsProject currentProject,
                          String resource, String property, String value)
    throws CmsException {
    // read the resource
    CmsResource res = readFileHeader(currentUser,currentProject, resource);
    // check the security
    if( ! accessWrite(currentUser, currentProject, res) ) {
        throw new CmsException("[" + this.getClass().getName() + "] " + resource,
            CmsException.C_NO_ACCESS);
    }

    m_dbAccess.writeProperty(property, value, res.getResourceId(),res.getType());
    m_propertyCache.clear();
    // set the file-state to changed
    // NOTE(review): this method passes 'true' as the last argument to
    // writeFileHeader/writeFolder while writeProperties passes 'false' -
    // confirm whether that difference is intentional.
    if(res.isFile()){
        m_dbAccess.writeFileHeader(currentProject, (CmsFile) res, true);
        if (res.getState()==C_STATE_UNCHANGED) {
            res.setState(C_STATE_CHANGED);
        }
        // refresh the cached copy of the file
        m_resourceCache.put(C_FILE+currentProject.getId()+resource,res);
    } else {
        if (res.getState()==C_STATE_UNCHANGED) {
            res.setState(C_STATE_CHANGED);
        }
        m_dbAccess.writeFolder(currentProject,
            readFolder(currentUser,currentProject, resource), true);
        // refresh the cached copy of the folder
        m_resourceCache.put(C_FOLDER+currentProject.getId()+resource,(CmsFolder)res);
    }
    m_subresCache.clear();
}

/**
 * Updates the propertydefinition for the resource type.<BR/>
 *
 * <B>Security</B>
 * Only the admin can do this.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param propertydef The propertydef to be written.
 *
 * @return The propertydefinition, that was written.
*
 * @exception CmsException Throws CmsException if something goes wrong.
 */
public CmsPropertydefinition writePropertydefinition(CmsUser currentUser,
                                                     CmsProject currentProject,
                                                     CmsPropertydefinition propertydef)
    throws CmsException {
    // check the security
    if( isAdmin(currentUser, currentProject) ) {
        // the cached definition list is stale after a write
        m_propertyDefVectorCache.clear();
        return( m_dbAccess.writePropertydefinition(propertydef) );
    } else {
        throw new CmsException("[" + this.getClass().getName() + "] " + propertydef.getName(),
            CmsException.C_NO_ACCESS);
    }
}

/**
 * Writes a new user tasklog for a task.
 *
 * <B>Security:</B>
 * All users are granted.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param taskid The Id of the task.
 * @param comment Description for the log
 *
 * @exception CmsException Throws CmsException if something goes wrong.
 */
public void writeTaskLog(CmsUser currentUser, CmsProject currentProject,
                         int taskid, String comment)
    throws CmsException {
    // log entries of this overload always get the C_TASKLOG_USER type
    m_dbAccess.writeTaskLog(taskid, currentUser.getId(),
                            new java.sql.Timestamp(System.currentTimeMillis()),
                            comment, C_TASKLOG_USER);
}

/**
 * Writes a new user tasklog for a task.
 *
 * <B>Security:</B>
 * All users are granted.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param taskid The Id of the task.
 * @param comment Description for the log
 * @param type Type of the tasklog. User tasktypes must be greater then 100.
 *
 * @exception CmsException Throws CmsException if something goes wrong.
 */
public void writeTaskLog(CmsUser currentUser, CmsProject currentProject,
                         int taskid, String comment, int type)
    throws CmsException {
    m_dbAccess.writeTaskLog(taskid, currentUser.getId(),
                            new java.sql.Timestamp(System.currentTimeMillis()),
                            comment, type);
}

/**
 * Updates the user information.<BR/>
 *
 * Only the administrator can do this.<P/>
 *
 * <B>Security:</B>
 * Only users, which are in the group "administrators" are granted.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param user The user to be updated.
 *
 * @exception CmsException Throws CmsException if operation was not successful
 */
public void writeUser(CmsUser currentUser, CmsProject currentProject,
                      CmsUser user)
    throws CmsException {
    // Check the security
    if( isAdmin(currentUser, currentProject) || (currentUser.equals(user)) ) {

        // prevent the admin to be set disabled!
        if( isAdmin(user, currentProject) ) {
            user.setEnabled();
        }
        m_dbAccess.writeUser(user);
        // refresh the cached copy of the user
        m_userCache.put(user.getName()+user.getType(),user);
    } else {
        throw new CmsException("[" + this.getClass().getName() + "] " + user.getName(),
            CmsException.C_NO_ACCESS);
    }
}

/**
 * Updates the user information of a web user.<BR/>
 *
 * Only a web user can be updated this way.<P/>
 *
 * <B>Security:</B>
 * Only users of the user type webuser can be updated this way.
 *
 * @param currentUser The user who requested this method.
 * @param currentProject The current project of the user.
 * @param user The user to be updated.
*
 * @exception CmsException Throws CmsException if operation was not successful
 */
public void writeWebUser(CmsUser currentUser, CmsProject currentProject,
                         CmsUser user)
    throws CmsException {
    // Check the security: only users of type webuser may be written this way
    if( user.getType() == C_USER_TYPE_WEBUSER) {
        m_dbAccess.writeUser(user);
        // refresh the cached copy of the user
        m_userCache.put(user.getName()+user.getType(),user);
    } else {
        throw new CmsException("[" + this.getClass().getName() + "] " + user.getName(),
            CmsException.C_NO_ACCESS);
    }
}
}
package org.jdesktop.swingx; import java.awt.BorderLayout; import java.awt.Point; import java.lang.reflect.Method; import java.util.logging.Logger; import javax.swing.AbstractButton; import javax.swing.Action; import javax.swing.Box; import javax.swing.JComponent; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JScrollPane; import javax.swing.JToolBar; import javax.swing.UIManager; /** * Base class for supporting inclusion of interactive tests into a JUnit test case. * Note that the interactive tests are NOT executed by the JUnit framework and * are not automated. They are typically used for visual inspection of features * during development. It is convenient to include the interactive tests along with * the automated JUnit tests since they may share resources and it keeps tests * focused in a single place. * <p> * All interactive test methods should be prefixed with &quot;interactive&quot;, * e.g. interactiveTestTableSorting().</p> * <p> * The test class's <code>main</code> method should be used to control which * interactive tests should run. Use <code>runInteractiveTests()</code> method * to run all interactive tests in the class.</p> * <p> * Ultimately we need to investigate moving to a mechanism which can help automate * interactive tests. JFCUnit is being investigated. In the meantime, this * is quick and dirty and cheap. 
* </p>
 * @author Amy Fowler
 * @version 1.0
 */
public abstract class InteractiveTestCase extends junit.framework.TestCase {
    private static final Logger LOG = Logger
            .getLogger(InteractiveTestCase.class.getName());

    /** Top-left corner for the next frame opened; advanced by wrapInFrame. */
    protected Point frameLocation = new Point(0,0);

    /**
     * Creates a test case whose name is the simple (unqualified) class name.
     */
    public InteractiveTestCase() {
        super();
        String className = getClass().getName();
        int lastDot = className.lastIndexOf(".");
        String lastElement = className.substring(lastDot + 1);
        setName(lastElement);
    }

    /**
     * Creates a test case with the given title as its name.
     *
     * @param testTitle the name of the test
     */
    public InteractiveTestCase(String testTitle) {
        super(testTitle);
    }

    /**
     * Wraps the component in a JScrollPane and opens it in a frame.
     *
     * @param component the component to show
     * @param title the frame title
     * @return the configured, packed JXFrame
     */
    public JXFrame wrapWithScrollingInFrame(JComponent component, String title) {
        JScrollPane scroller = new JScrollPane(component);
        return wrapInFrame(scroller, title);
    }

    /**
     * Wraps both components in JScrollPanes, places them side by side in a
     * horizontal box and opens the box in a frame.
     *
     * @param leftComp the component shown on the left
     * @param rightComp the component shown on the right
     * @param title the frame title
     * @return the configured, packed JXFrame
     */
    public JXFrame wrapWithScrollingInFrame(JComponent leftComp, JComponent rightComp, String title) {
        JComponent comp = Box.createHorizontalBox();
        comp.add(new JScrollPane(leftComp));
        comp.add(new JScrollPane(rightComp));
        JXFrame frame = wrapInFrame(comp, title);
        return frame;
    }

    /**
     * Opens the component in a JXFrame with an (initially empty) toolbar,
     * cascading each new frame 30px down and to the right. The very first
     * frame (location 0/0) exits the VM when closed.
     *
     * @param component the component to show
     * @param title the frame title
     * @return the configured, packed JXFrame
     */
    public JXFrame wrapInFrame(JComponent component, String title) {
        JXFrame frame = new JXFrame(title, false);
        JToolBar toolbar = new JToolBar();
        frame.getRootPaneExt().setToolBar(toolbar);
        frame.getContentPane().add(BorderLayout.CENTER, component);
        frame.pack();
        frame.setLocation(frameLocation);
        if (frameLocation.x == 0) {
            // closing the first frame ends the whole interactive session
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            frame.setTitle(title+" [close me and all tests will close]");
        }
        frameLocation.x += 30;
        frameLocation.y += 30;
        return frame;
    }

    /**
     * Runs all tests whose method names match the specified regex pattern.
* @param regexPattern regular expression pattern used to match test method names
     * @throws java.lang.Exception if setUp or tearDown fails
     */
    public void runInteractiveTests(String regexPattern) throws java.lang.Exception {
        setUp();
        Class<?> testClass = getClass();
        Method[] methods = testClass.getMethods();
        // count the matches so we can report when the pattern matched nothing
        int matched = 0;
        for (int i = 0; i < methods.length; i++) {
            if (methods[i].getName().matches(regexPattern)) {
                matched++;
                try {
                    // explicit cast avoids the varargs ambiguity of invoke(this, null)
                    methods[i].invoke(this, (Object[]) null);
                } catch (Exception e) {
                    System.out.println("could not run interactive test: "
                            + methods[i].getName());
                    e.printStackTrace();
                }
            }
        }
        // BUGFIX: this previously tested methods.length == 0, which can never be
        // true (getMethods() always includes the public Object methods), so the
        // warning below was unreachable; test the number of *matching* methods.
        if (matched == 0) {
            System.out.println("no test methods found matching the pattern: "
                    + regexPattern);
        }
        tearDown();
    }

    /**
     * Runs all test methods which are prefixed with &quot;interactive&quot;.
     * @throws java.lang.Exception if setUp or tearDown fails
     */
    public void runInteractiveTests() throws java.lang.Exception {
        runInteractiveTests("interactive.*");
    }

    /**
     * Adds the action to the frame's toolbar as a non-focusable button.
     *
     * @param frame the frame whose toolbar receives the action
     * @param action the action to add
     */
    public void addAction(JXFrame frame, Action action) {
        JToolBar toolbar = frame.getRootPaneExt().getToolBar();
        if (toolbar != null) {
            AbstractButton button = toolbar.add(action);
            button.setFocusable(false);
        }
    }

    /**
     * Shows the message in the frame's status bar, creating the status bar
     * lazily if necessary.
     *
     * @param frame the frame to show the message in
     * @param message the message to show
     */
    public void addMessage(JXFrame frame, String message) {
        JXStatusBar statusBar = frame.getRootPaneExt().getStatusBar();
        if (statusBar == null) {
            statusBar = new JXStatusBar();
            frame.getRootPaneExt().setStatusBar(statusBar);
        }
        statusBar.add(new JLabel(message));
    }

    /**
     * Switches the look and feel to either the system LF or the
     * cross-platform LF; failures are logged, not rethrown.
     *
     * @param system true for the system LF, false for the cross-platform LF
     */
    public static void setSystemLF(boolean system) {
        String lfName = system ? UIManager.getSystemLookAndFeelClassName()
                : UIManager.getCrossPlatformLookAndFeelClassName();
        try {
            UIManager.setLookAndFeel(lfName);
        } catch (Exception e1) {
            LOG.info("exception when setting LF to " + lfName);
        }
    }
}
package com.rapidminer.operator.io;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

import com.rapidminer.example.Attribute;
import com.rapidminer.example.ExampleSet;
import com.rapidminer.example.table.AttributeFactory;
import com.rapidminer.example.table.DataRow;
import com.rapidminer.example.table.DataRowFactory;
import com.rapidminer.example.table.MemoryExampleTable;
import com.rapidminer.operator.Annotations;
import com.rapidminer.operator.OperatorDescription;
import com.rapidminer.operator.OperatorException;
import com.rapidminer.operator.UserError;
import com.rapidminer.operator.ports.metadata.AttributeMetaData;
import com.rapidminer.operator.ports.metadata.ExampleSetMetaData;
import com.rapidminer.operator.ports.metadata.MetaData;
import com.rapidminer.parameter.ParameterType;
import com.rapidminer.parameter.ParameterTypeCategory;
import com.rapidminer.tools.I18N;
import com.rapidminer.tools.LogService;
import com.rapidminer.tools.Ontology;
import com.rapidminer.tools.ParameterService;
import com.rapidminer.tools.Tools;
import com.rapidminer.tools.jdbc.ColumnIdentifier;
import com.rapidminer.tools.jdbc.DatabaseHandler;
import com.rapidminer.tools.jdbc.StatementCreator;
import com.rapidminer.tools.jdbc.TableName;
import com.rapidminer.tools.jdbc.connection.ConnectionEntry;
import com.rapidminer.tools.jdbc.connection.ConnectionProvider;

/**
 * Reads an {@link ExampleSet} from an SQL {@link Connection} table. SQL datatypes are mapped to value types of
 * {@link Attribute}s by using {@link DatabaseHandler#getRapidMinerTypeIndex(int)}. Data is copied into main memory.
 *
 * Data can be read from either a table name or a query. In the first case, the meta data is retrieved from the database
 * meta data for that table for reasons of efficiency. In the latter case, a LIMIT 0 is appended to the query, which may
 * fail on some systems but which enables us to retrieve the structure of the table without performing the entire query
 * itself.
 *
 * @author Simon Fischer
 */
public class DatabaseDataReader extends AbstractExampleSource implements ConnectionProvider {

    /** System property to decide whether meta data should be fetched from DB for database queries. */
    public static final String PROPERTY_EVALUATE_MD_FOR_SQL_QUERIES = "rapidminer.gui.evaluate_meta_data_for_sql_queries";

    public DatabaseDataReader(OperatorDescription description) {
        super(description);
    }

    /** Connection handler created lazily by {@link #getResultSet()} and closed again by {@link #read()}. */
    private DatabaseHandler databaseHandler;

    /**
     * Reads the example set and always closes the JDBC connection afterwards,
     * even if reading failed. Close failures are logged, not rethrown.
     */
    @Override
    public ExampleSet read() throws OperatorException {
        try {
            return super.read();
        } finally {
            if (databaseHandler != null && databaseHandler.getConnection() != null) {
                try {
                    databaseHandler.getConnection().close();
                } catch (SQLException e) {
                    getLogger().log(Level.WARNING, "Error closing database connection: " + e, e);
                }
            }
        }
    }

    /**
     * Connects to the database and executes the configured query.
     *
     * @return the open result set (caller is responsible for closing it)
     * @throws OperatorException UserError 202 when no query source is configured,
     *         UserError 304 wrapping any SQL failure
     */
    protected ResultSet getResultSet() throws OperatorException {
        try {
            databaseHandler = DatabaseHandler.getConnectedDatabaseHandler(this);
            String query = getQuery(databaseHandler.getStatementCreator());
            if (query == null) {
                throw new UserError(this, 202, new Object[] { "query", "query_file", "table_name" });
            }
            return databaseHandler.executeStatement(query, true, this, getLogger());
        } catch (SQLException sqle) {
            throw new UserError(this, sqle, 304, sqle.getMessage());
        }
    }

    /**
     * Executes the query and copies the full result into a memory example table.
     * The result set is always closed before returning.
     */
    @Override
    public ExampleSet createExampleSet() throws OperatorException {
        ResultSet resultSet = getResultSet();
        MemoryExampleTable table;
        try {
            List<Attribute> attributes = getAttributes(resultSet);
            table = createExampleTable(resultSet, attributes,
                    getParameterAsInt(ExampleSource.PARAMETER_DATAMANAGEMENT), getLogger());
        } catch (SQLException e) {
            throw new UserError(this, e, 304, e.getMessage());
        } finally {
            try {
                resultSet.close();
            } catch (SQLException e) {
                getLogger().log(Level.WARNING, "DB error closing result set: " + e, e);
            }
        }
        return table.createExampleSet();
    }

    /**
     * Derives the example set meta data without reading any rows. In table mode
     * the DB catalog is queried directly; in query mode a prepared statement's
     * meta data is used (can be disabled via
     * {@link #PROPERTY_EVALUATE_MD_FOR_SQL_QUERIES}). SQL failures are logged
     * and result in (partially) empty meta data rather than an error.
     */
    @Override
    public MetaData getGeneratedMetaData() throws OperatorException {
        ExampleSetMetaData metaData = new ExampleSetMetaData();
        try {
            databaseHandler = DatabaseHandler.getConnectedDatabaseHandler(this);
            switch (getParameterAsInt(DatabaseHandler.PARAMETER_DEFINE_QUERY)) {
                case DatabaseHandler.QUERY_TABLE:
                    // Cheap path: column names/types straight from the DB catalog.
                    List<ColumnIdentifier> columns = databaseHandler.getAllColumnNames(
                            DatabaseHandler.getSelectedTableName(this),
                            databaseHandler.getConnection().getMetaData());
                    for (ColumnIdentifier column : columns) {
                        metaData.addAttribute(new AttributeMetaData(column.getColumnName(),
                                DatabaseHandler.getRapidMinerTypeIndex(column.getSqlType())));
                    }
                    break;
                case DatabaseHandler.QUERY_QUERY:
                case DatabaseHandler.QUERY_FILE:
                default:
                    if (!"false".equals(ParameterService.getParameterValue(PROPERTY_EVALUATE_MD_FOR_SQL_QUERIES))) {
                        String query = getQuery(databaseHandler.getStatementCreator());
                        PreparedStatement prepared = databaseHandler.getConnection().prepareStatement(query);
                        List<Attribute> attributes = getAttributes(prepared.getMetaData());
                        for (Attribute att : attributes) {
                            metaData.addAttribute(new AttributeMetaData(att));
                        }
                        prepared.close();
                    }
                    break;
            }
        } catch (SQLException e) {
            LogService.getRoot().log(Level.WARNING,
                    I18N.getMessage(LogService.getRoot().getResourceBundle(),
                            "com.rapidminer.operator.io.DatabaseDataReader.fetching_meta_data_error", e), e);
        } finally {
            try {
                if (databaseHandler != null && databaseHandler.getConnection() != null) {
                    databaseHandler.disconnect();
                }
            } catch (SQLException e) {
                getLogger().log(Level.WARNING, "DB error closing connection: " + e, e);
            }
        }
        return metaData;
    }

    /**
     * Copies all rows of the result set into a new {@link MemoryExampleTable}.
     * Dates are stored as epoch millis, numericals as doubles, nominals via the
     * attribute's mapping; SQL NULLs become {@code Double.NaN}.
     *
     * @param resultSet the open result set, positioned before the first row
     * @param attributes attributes matching the result set columns in order
     * @param dataManagementType row representation, see {@link DataRowFactory}
     * @param logger optional logger for unknown column types (may be null)
     */
    public static MemoryExampleTable createExampleTable(ResultSet resultSet, List<Attribute> attributes,
            int dataManagementType, Logger logger) throws SQLException, OperatorException {
        ResultSetMetaData metaData = resultSet.getMetaData();
        Attribute[] attributeArray = attributes.toArray(new Attribute[attributes.size()]);
        MemoryExampleTable table = new MemoryExampleTable(attributes);
        DataRowFactory factory = new DataRowFactory(dataManagementType, '.');
        while (resultSet.next()) {
            DataRow dataRow = factory.create(attributeArray.length);
            for (int i = 1; i <= metaData.getColumnCount(); i++) {
                Attribute attribute = attributeArray[i - 1];
                int valueType = attribute.getValueType();
                double value;
                if (Ontology.ATTRIBUTE_VALUE_TYPE.isA(valueType, Ontology.DATE_TIME)) {
                    Timestamp timestamp = resultSet.getTimestamp(i);
                    value = resultSet.wasNull() ? Double.NaN : timestamp.getTime();
                } else if (Ontology.ATTRIBUTE_VALUE_TYPE.isA(valueType, Ontology.NUMERICAL)) {
                    value = resultSet.getDouble(i);
                    if (resultSet.wasNull()) {
                        value = Double.NaN;
                    }
                } else if (Ontology.ATTRIBUTE_VALUE_TYPE.isA(valueType, Ontology.NOMINAL)) {
                    String valueString;
                    if (metaData.getColumnType(i) == Types.CLOB) {
                        valueString = readClob(resultSet.getClob(i));
                    } else {
                        valueString = resultSet.getString(i);
                    }
                    if (resultSet.wasNull() || valueString == null) {
                        value = Double.NaN;
                    } else {
                        value = attribute.getMapping().mapString(valueString);
                    }
                } else {
                    if (logger != null) {
                        logger.warning("Unknown column type: " + attribute);
                    }
                    value = Double.NaN;
                }
                dataRow.set(attribute, value);
            }
            table.addDataRow(dataRow);
        }
        return table;
    }

    /**
     * Reads the complete character content of a CLOB (lines joined with '\n'),
     * or returns null for a null CLOB.
     *
     * @throws OperatorException wrapping any I/O failure while reading
     */
    private static String readClob(Clob clob) throws SQLException, OperatorException {
        if (clob == null) {
            return null;
        }
        BufferedReader in = null;
        try {
            in = new BufferedReader(clob.getCharacterStream());
            StringBuilder buffer = new StringBuilder();
            String line;
            while ((line = in.readLine()) != null) {
                buffer.append(line).append('\n');
            }
            return buffer.toString();
        } catch (IOException e) {
            throw new OperatorException("Database error occurred: " + e, e);
        } finally {
            // BUG FIX: the original closed unconditionally and could throw an
            // NPE from the finally block if reader creation had failed.
            if (in != null) {
                try {
                    in.close();
                } catch (IOException ignored) {
                    // best effort close
                }
            }
        }
    }

    /** Creates attributes for all columns of the result set, see {@link #getAttributes(ResultSetMetaData)}. */
    public static List<Attribute> getAttributes(ResultSet resultSet) throws SQLException {
        ResultSetMetaData metaData = resultSet.getMetaData();
        return getAttributes(metaData);
    }

    /**
     * Creates one attribute per column. Duplicate column labels are made unique
     * by appending "_&lt;n&gt;" suffixes; the original SQL type name is stored in
     * the attribute annotation "sql_type".
     */
    private static List<Attribute> getAttributes(ResultSetMetaData metaData) throws SQLException {
        List<Attribute> result = new LinkedList<Attribute>();
        if (metaData != null) {
            // Maps original column names to how often they have been used so far.
            Map<String, Integer> duplicateNameMap = new HashMap<String, Integer>();
            for (int columnIndex = 1; columnIndex <= metaData.getColumnCount(); columnIndex++) {
                // column name from DB
                String dbColumnName = metaData.getColumnLabel(columnIndex);
                // name that will be used in example set
                String columnName = dbColumnName;
                Integer duplicateCount = duplicateNameMap.get(dbColumnName);
                boolean isUnique = duplicateCount == null;
                if (isUnique) {
                    duplicateNameMap.put(columnName, Integer.valueOf(1));
                } else {
                    // name already present, iterate until a unique suffix is found
                    while (!isUnique) {
                        duplicateCount = duplicateCount + 1;
                        // -1 keeps the historic suffix numbering for compatibility
                        columnName = dbColumnName + "_" + (duplicateCount - 1);
                        isUnique = duplicateNameMap.get(columnName) == null;
                    }
                    duplicateNameMap.put(dbColumnName, duplicateCount);
                }
                int attributeType = DatabaseHandler.getRapidMinerTypeIndex(metaData.getColumnType(columnIndex));
                final Attribute attribute = AttributeFactory.createAttribute(columnName, attributeType);
                attribute.getAnnotations().setAnnotation("sql_type", metaData.getColumnTypeName(columnIndex));
                result.add(attribute);
            }
        }
        return result;
    }

    /**
     * Builds the SQL query according to the "define query" parameter: the raw
     * query string, the content of a query file, or a SELECT * over the chosen
     * table. Returns null when no source yields a query.
     */
    private String getQuery(StatementCreator sc) throws OperatorException {
        switch (getParameterAsInt(DatabaseHandler.PARAMETER_DEFINE_QUERY)) {
            case DatabaseHandler.QUERY_QUERY: {
                String query = getParameterAsString(DatabaseHandler.PARAMETER_QUERY);
                if (query != null) {
                    query = query.trim();
                }
                return query;
            }
            case DatabaseHandler.QUERY_FILE: {
                File queryFile = getParameterAsFile(DatabaseHandler.PARAMETER_QUERY_FILE);
                if (queryFile != null) {
                    String query = null;
                    try {
                        query = Tools.readTextFile(queryFile);
                    } catch (IOException ioe) {
                        throw new UserError(this, ioe, 302, new Object[] { queryFile, ioe.getMessage() });
                    }
                    if (query == null || query.trim().length() == 0) {
                        throw new UserError(this, 205, queryFile);
                    }
                    return query;
                }
            }
            // NOTE(review): when QUERY_FILE is selected but no file is set, control
            // falls through to the table case below — presumably unintended; verify.
            case DatabaseHandler.QUERY_TABLE:
                TableName tableName = DatabaseHandler.getSelectedTableName(this);
                return "SELECT * FROM " + sc.makeIdentifier(tableName);
        }
        return null;
    }

    @Override
    public ConnectionEntry getConnectionEntry() {
        return DatabaseHandler.getConnectionEntry(this);
    }

    /**
     * Annotates the result with the query it came from. Best effort: parameter
     * errors while rebuilding the query are deliberately ignored.
     */
    @Override
    protected void addAnnotations(ExampleSet result) {
        try {
            if (databaseHandler != null) {
                result.getAnnotations().setAnnotation(Annotations.KEY_SOURCE,
                        getQuery(databaseHandler.getStatementCreator()));
            }
        } catch (OperatorException e) {
            // best effort only; the annotation is optional
        }
    }

    @Override
    protected boolean isMetaDataCacheable() {
        return true;
    }

    @Override
    public List<ParameterType> getParameterTypes() {
        List<ParameterType> list = super.getParameterTypes();
        list.addAll(DatabaseHandler.getConnectionParameterTypes(this));
        list.addAll(DatabaseHandler.getQueryParameterTypes(this, false));
        list.addAll(DatabaseHandler.getStatementPreparationParamterTypes(this));
        list.add(new ParameterTypeCategory(ExampleSource.PARAMETER_DATAMANAGEMENT,
                "Determines, how the data is represented internally.",
                DataRowFactory.TYPE_NAMES, DataRowFactory.TYPE_DOUBLE_ARRAY, false));
        return list;
    }
}
package usp.ime.line.ivprog.controller;

import ilm.framework.domain.DomainModel;

import java.awt.event.ComponentListener;
import java.util.HashMap;

import usp.ime.line.ivprog.Services;
import usp.ime.line.ivprog.listeners.ICodeListener;
import usp.ime.line.ivprog.model.IVPProgram;
import usp.ime.line.ivprog.model.components.datafactory.dataobjetcs.CodeComposite;
import usp.ime.line.ivprog.model.components.datafactory.dataobjetcs.Function;
import usp.ime.line.ivprog.model.domainaction.ChangeVariableName;
import usp.ime.line.ivprog.model.domainaction.ChangeVariableType;
import usp.ime.line.ivprog.model.domainaction.DeleteVariable;
import usp.ime.line.ivprog.model.domainaction.NewChild;
import usp.ime.line.ivprog.model.domainaction.NewVariable;
import usp.ime.line.ivprog.view.domaingui.IVPDomainGUI;
import usp.ime.line.ivprog.view.domaingui.workspace.IVPFunctionBody;

/**
 * Mediates between the IVP domain GUI and the {@link IVPProgram} model by
 * dispatching user gestures to pre-registered domain actions and by notifying
 * registered {@link ICodeListener}s about code changes.
 */
public class IVPController {

    private IVPProgram program = null;
    private IVPDomainGUI gui = null;
    // Domain actions registered by initDomainActionList, keyed by action name
    // (e.g. "newvar", "delvar"); values are the concrete action objects.
    private HashMap<String, Object> actionList;
    // Code listeners keyed by the id of the code container they observe.
    private HashMap<String, ICodeListener> codeListener;

    public IVPController() {
        actionList = new HashMap<String, Object>();
        codeListener = new HashMap<String, ICodeListener>();
    }

    /** Returns the registered domain actions keyed by action name. */
    public HashMap getActionList() {
        return actionList;
    }

    public IVPProgram getProgram() {
        return program;
    }

    public void setProgram(IVPProgram program) {
        this.program = program;
    }

    public IVPDomainGUI getGui() {
        return gui;
    }

    public void setGui(IVPDomainGUI gui) {
        this.gui = gui;
    }

    /** Delegates model initialization to the program. */
    public void initializeModel() {
        program.initializeModel();
    }

    public void showExecutionEnvironment() {
        // not yet implemented
    }

    public void showConstructionEnvironment() {
        // not yet implemented
    }

    /**
     * Executes the "newchild" action for the given container and notifies the
     * listener registered for that container about the created child.
     *
     * @param containerID id of the container receiving the child
     * @param childType type code of the child to create
     */
    public void addChild(String containerID, short childType) {
        NewChild newChild = (NewChild) actionList.get("newchild");
        newChild.setClassID(childType);
        newChild.setContainerID(containerID);
        newChild.execute();
        ICodeListener listener = codeListener.get(containerID);
        listener.childAdded(newChild.getObjectID());
    }

    public void addParameter(String scopeID) {
        // not yet implemented
    }

    /** Executes the "newvar" action, creating a variable in the given scope. */
    public void addVariable(String scopeID) {
        NewVariable newVar = (NewVariable) actionList.get("newvar");
        newVar.setScopeID(scopeID);
        newVar.execute();
    }

    /** Executes the "delvar" action, removing the variable from the given scope. */
    public void deleteVariable(String scopeID, String id) {
        DeleteVariable delVar = (DeleteVariable) actionList.get("delvar");
        delVar.setScopeID(scopeID);
        delVar.setVariableID(id);
        delVar.execute();
    }

    /** Executes the "changeVarName" action for the given variable. */
    public void changeVariableName(String id, String name) {
        ChangeVariableName changeVarName = (ChangeVariableName) actionList.get("changeVarName");
        changeVarName.setVariableID(id);
        changeVarName.setNewName(name);
        changeVarName.execute();
    }

    /** Executes the "changeVarType" action for the given variable. */
    public void changeVariableType(String id, short type) {
        ChangeVariableType changeVarType = (ChangeVariableType) actionList.get("changeVarType");
        changeVarType.setVariableID(id);
        changeVarType.setNewType(type);
        changeVarType.execute();
    }

    // TODO: DomainAction — this bypasses the action list and talks to the model directly.
    public void changeVariableInitialValue(String id, String value) {
        program.changeVariableInitialValue(id, value);
    }

    /**
     * Creates all domain actions, binds them to the model and registers them
     * in the action list under their names.
     *
     * @param model the domain model the actions operate on
     */
    public void initDomainActionList(DomainModel model) {
        NewVariable newVar = new NewVariable("newvar", "newvar");
        newVar.setDomainModel(model);
        actionList.put("newvar", newVar);
        DeleteVariable delVar = new DeleteVariable("delvar", "delvar");
        delVar.setDomainModel(model);
        actionList.put("delvar", delVar);
        ChangeVariableName changeVarName = new ChangeVariableName("changeVarName", "changeVarName");
        changeVarName.setDomainModel(model);
        actionList.put("changeVarName", changeVarName);
        ChangeVariableType changeVarType = new ChangeVariableType("changeVarType", "changeVarType");
        changeVarType.setDomainModel(model);
        actionList.put("changeVarType", changeVarType);
        NewChild newChild = new NewChild("newchild", "newchild");
        newChild.setDomainModel(model);
        actionList.put("newchild", newChild);
    }

    /** Registers a listener for the code container with the given id. */
    public void addComponentListener(ICodeListener listener, String id) {
        codeListener.put(id, listener);
    }

    public void removeChild(String containerID, String childID) {
        // not yet implemented
    }
}
package io.subutai.core.systemmanager.impl;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.concurrent.TimeUnit;

import org.apache.commons.configuration.ConfigurationException;

import com.google.common.base.Preconditions;

import io.subutai.common.command.CommandException;
import io.subutai.common.command.CommandResult;
import io.subutai.common.command.RequestBuilder;
import io.subutai.common.peer.Host;
import io.subutai.common.peer.HostNotFoundException;
import io.subutai.common.settings.SubutaiInfo;
import io.subutai.common.settings.SystemSettings;
import io.subutai.core.identity.api.IdentityManager;
import io.subutai.core.identity.api.model.User;
import io.subutai.core.kurjun.api.KurjunTransferQuota;
import io.subutai.core.kurjun.api.TemplateManager;
import io.subutai.core.peer.api.PeerManager;
import io.subutai.core.systemmanager.api.SystemManager;
import io.subutai.core.systemmanager.api.pojo.AdvancedSettings;
import io.subutai.core.systemmanager.api.pojo.KurjunSettings;
import io.subutai.core.systemmanager.api.pojo.NetworkSettings;
import io.subutai.core.systemmanager.api.pojo.PeerSettings;
import io.subutai.core.systemmanager.api.pojo.SystemInfo;
import io.subutai.core.systemmanager.impl.pojo.AdvancedSettingsPojo;
import io.subutai.core.systemmanager.impl.pojo.KurjunSettingsPojo;
import io.subutai.core.systemmanager.impl.pojo.NetworkSettingsPojo;
import io.subutai.core.systemmanager.impl.pojo.PeerSettingsPojo;
import io.subutai.core.systemmanager.impl.pojo.SystemInfoPojo;


/**
 * Default {@link SystemManager} implementation: validates and persists system
 * settings and assembles the pojo views used by the management UI.
 */
public class SystemManagerImpl implements SystemManager
{
    private static final String DEFAULT_KURJUN_REPO = "http://repo.critical-factor.com:8080/rest/kurjun";

    private TemplateManager templateManager;
    private IdentityManager identityManager;
    private PeerManager peerManager;


    /**
     * Validates the given URLs and ports and stores them in {@link SystemSettings}.
     *
     * @param globalKurjunUrls global Kurjun repo URL (falls back to the default repo when blank)
     * @param publicUrl public URL of this peer
     * @throws ConfigurationException when any URL is malformed
     */
    public SystemManagerImpl( final String globalKurjunUrls, final int securePortX1, final int securePortX2,
                              final int securePortX3, final String publicUrl ) throws ConfigurationException
    {
        Preconditions.checkNotNull( globalKurjunUrls, "Invalid Global Kurjun URLs could not be null." );

        // BUG FIX: the original wrapped the string in a one-element array and then
        // tested urls.length < 1, which is never true, so the DEFAULT_KURJUN_REPO
        // fallback was dead code. Fall back when the configured value is blank.
        String[] urls;
        if ( globalKurjunUrls.trim().isEmpty() )
        {
            urls = new String[] { DEFAULT_KURJUN_REPO };
        }
        else
        {
            urls = new String[] { globalKurjunUrls };
        }

        validateGlobalKurjunUrls( urls );
        validatePublicUrl( publicUrl );

        SystemSettings.setGlobalKurjunUrls( urls );
        SystemSettings.setSecurePortX1( securePortX1 );
        SystemSettings.setSecurePortX2( securePortX2 );
        SystemSettings.setSecurePortX3( securePortX3 );
        SystemSettings.setPublicUrl( publicUrl );
    }


    /**
     * Collects the current Kurjun quota and URL settings. Quota fields are only
     * filled when both the disk and the transfer quota of a repo are present.
     */
    @Override
    public KurjunSettings getKurjunSettings() throws ConfigurationException
    {
        KurjunSettings pojo = new KurjunSettingsPojo();

        KurjunTransferQuota publicTransferQuota = templateManager.getTransferQuota( "public" );
        KurjunTransferQuota trustTransferQuota = templateManager.getTransferQuota( "trust" );
        Long publicDiskQuota = templateManager.getDiskQuota( "public" );
        Long trustDiskQuota = templateManager.getDiskQuota( "trust" );

        if ( publicDiskQuota != null && publicTransferQuota != null )
        {
            pojo.setPublicDiskQuota( publicDiskQuota );
            pojo.setPublicThreshold( publicTransferQuota.getThreshold() );
            pojo.setPublicTimeFrame( publicTransferQuota.getTimeFrame() );
            pojo.setPublicTimeUnit( publicTransferQuota.getTimeUnit() );
        }

        if ( trustDiskQuota != null && trustTransferQuota != null )
        {
            pojo.setTrustDiskQuota( trustDiskQuota );
            pojo.setTrustThreshold( trustTransferQuota.getThreshold() );
            pojo.setTrustTimeFrame( trustTransferQuota.getTimeFrame() );
            pojo.setTrustTimeUnit( trustTransferQuota.getTimeUnit() );
        }

        pojo.setGlobalKurjunUrls( SystemSettings.getGlobalKurjunUrls() );
        pojo.setLocalKurjunUrls( SystemSettings.getLocalKurjunUrls() );

        return pojo;
    }


    /**
     * Collects build/version information plus the resource host version taken
     * from "subutai -v" on the management host. When no host is reachable, the
     * RH version is reported as "No RH connected".
     */
    @Override
    public SystemInfo getSystemInfo() throws ConfigurationException
    {
        SystemInfo pojo = new SystemInfoPojo();

        pojo.setGitCommitId( SubutaiInfo.getCommitId() );
        pojo.setGitBranch( SubutaiInfo.getBranch() );
        pojo.setGitCommitUserName( SubutaiInfo.getCommitterUserName() );
        pojo.setGitCommitUserEmail( SubutaiInfo.getCommitterUserEmail() );
        pojo.setGitBuildUserName( SubutaiInfo.getBuilderUserName() );
        pojo.setGitBuildUserEmail( SubutaiInfo.getBuilderUserEmail() );
        pojo.setGitBuildTime( SubutaiInfo.getBuildTime() );
        pojo.setProjectVersion( SubutaiInfo.getVersion() );

        CommandResult result = null;
        RequestBuilder requestBuilder = new RequestBuilder( "subutai -v" );
        try
        {
            Host host = peerManager.getLocalPeer().getManagementHost();
            result = peerManager.getLocalPeer().execute( requestBuilder, host );
        }
        catch ( HostNotFoundException | CommandException e )
        {
            // TODO(review): replace printStackTrace with proper logging
            e.printStackTrace();
            pojo.setRhVersion( "No RH connected" );
            return pojo;
        }

        String[] version = result.getStdOut().split( "\\s" );
        // BUG FIX: guard the index — the original read version[2] unconditionally
        // and could throw ArrayIndexOutOfBoundsException on unexpected output.
        if ( version.length > 2 )
        {
            pojo.setRhVersion( version[2] );
        }
        else
        {
            pojo.setRhVersion( result.getStdOut().trim() );
        }

        return pojo;
    }


    /** Makes the currently active user the peer owner. */
    @Override
    public void setPeerSettings()
    {
        identityManager.setPeerOwner( identityManager.getActiveUser() );
    }


    /** Returns the peer owner id and the owner's user name. */
    @Override
    public PeerSettings getPeerSettings()
    {
        String peerOwnerId = identityManager.getPeerOwnerId();
        User user = identityManager.getUserByKeyId( peerOwnerId );

        PeerSettings pojo = new PeerSettingsPojo();
        pojo.setPeerOwnerId( peerOwnerId );
        pojo.setUserPeerOwnerName( user.getUserName() );

        return pojo;
    }


    /**
     * Stores the network settings. Port arguments are expected to be decimal
     * integers; a NumberFormatException propagates to the caller otherwise.
     */
    @Override
    public void setNetworkSettings( final String securePortX1, final String securePortX2, final String securePortX3,
                                    final String publicUrl, final String agentPort ) throws ConfigurationException
    {
        SystemSettings.setSecurePortX1( Integer.parseInt( securePortX1 ) );
        SystemSettings.setSecurePortX2( Integer.parseInt( securePortX2 ) );
        SystemSettings.setSecurePortX3( Integer.parseInt( securePortX3 ) );
        SystemSettings.setPublicUrl( publicUrl );
        SystemSettings.setAgentPort( Integer.parseInt( agentPort ) );
    }


    /**
     * Returns the karaf log content read from $SUBUTAI_APP_DATA_PATH/data/log/karaf.log.
     * Read failures leave the log field unset.
     */
    @Override
    public AdvancedSettings getAdvancedSettings()
    {
        AdvancedSettings pojo = new AdvancedSettingsPojo();

        String content;
        try
        {
            // Explicit UTF-8: the original used the platform default charset.
            content = new String( Files.readAllBytes(
                    Paths.get( System.getenv( "SUBUTAI_APP_DATA_PATH" ) + "/data/log/karaf.log" ) ),
                    StandardCharsets.UTF_8 );
            pojo.setKarafLogs( content );
        }
        catch ( IOException e )
        {
            // TODO(review): replace printStackTrace with proper logging
            e.printStackTrace();
        }

        return pojo;
    }


    @Override
    public void setKurjunSettingsUrls( final String[] globalKurjunUrls, final String[] localKurjunUrls )
            throws ConfigurationException
    {
        SystemSettings.setGlobalKurjunUrls( globalKurjunUrls );
        SystemSettings.setLocalKurjunUrls( localKurjunUrls );
    }


    /**
     * Stores the disk and transfer quotas for the "public" and "trust" repos.
     * Time frames are interpreted in hours.
     *
     * @return true when both transfer quotas were saved successfully
     */
    @Override
    public boolean setKurjunSettingsQuotas( final long publicDiskQuota, final long publicThreshold,
                                            final long publicTimeFrame, final long trustDiskQuota,
                                            final long trustThreshold, final long trustTimeFrame )
    {
        templateManager.setDiskQuota( publicDiskQuota, "public" );
        templateManager.setDiskQuota( trustDiskQuota, "trust" );

        KurjunTransferQuota publicTransferQuota =
                new KurjunTransferQuota( publicThreshold, publicTimeFrame, TimeUnit.HOURS );
        KurjunTransferQuota trustTransferQuota =
                new KurjunTransferQuota( trustThreshold, trustTimeFrame, TimeUnit.HOURS );

        boolean isPublicQuotaSaved = templateManager.setTransferQuota( publicTransferQuota, "public" );
        boolean isTrustQuotaSaved = templateManager.setTransferQuota( trustTransferQuota, "trust" );

        return isPublicQuotaSaved && isTrustQuotaSaved;
    }


    /** Returns the current network settings as a pojo. */
    @Override
    public NetworkSettings getNetworkSettings() throws ConfigurationException
    {
        NetworkSettings pojo = new NetworkSettingsPojo();

        pojo.setSecurePortX1( SystemSettings.getSecurePortX1() );
        pojo.setSecurePortX2( SystemSettings.getSecurePortX2() );
        pojo.setSecurePortX3( SystemSettings.getSecurePortX3() );
        pojo.setPublicUrl( SystemSettings.getPublicUrl() );
        pojo.setAgentPort( SystemSettings.getAgentPort() );

        return pojo;
    }


    /**
     * Checks that every entry parses as a {@link URL}.
     *
     * @throws ConfigurationException naming the first malformed URL
     */
    protected static void validateGlobalKurjunUrls( final String[] urls ) throws ConfigurationException
    {
        for ( String url : urls )
        {
            try
            {
                new URL( url );
            }
            catch ( MalformedURLException e )
            {
                throw new ConfigurationException( "Invalid URL: " + url );
            }
        }
    }


    /**
     * Checks that the public URL parses as a {@link URL}.
     *
     * @throws ConfigurationException when the URL is malformed
     */
    protected static void validatePublicUrl( String publicUrl ) throws ConfigurationException
    {
        try
        {
            new URL( publicUrl );
        }
        catch ( MalformedURLException e )
        {
            throw new ConfigurationException( "Invalid URL: " + publicUrl );
        }
    }


    public void setTemplateManager( final TemplateManager templateManager )
    {
        this.templateManager = templateManager;
    }


    public void setIdentityManager( final IdentityManager identityManager )
    {
        this.identityManager = identityManager;
    }


    public void setPeerManager( final PeerManager peerManager )
    {
        this.peerManager = peerManager;
    }
}
package com.t2.biofeedback.device.zephyr; import java.sql.SQLException; import java.util.ArrayList; import java.util.BitSet; import android.media.MediaPlayer; import android.os.AsyncTask; import android.os.Bundle; import android.os.Handler; import android.os.Message; import android.os.Messenger; import android.os.RemoteException; import android.util.Log; import com.t2.biofeedback.Constants; import com.t2.biofeedback.Util; import com.t2.biofeedback.device.BioFeedbackDevice; /** * Encapsulates methods necessary to communicate with a Bluetooth Zephyr device * * @author scott.coleman * */ public abstract class ZephyrDevice extends BioFeedbackDevice { private static final String TAG = Constants.TAG; boolean mDebug = true; long timout; static ZephyrDevice instance; ZephyrDevice(ArrayList<Messenger> serverListeners) { instance = this; this.mServerListeners = serverListeners; } /* (non-Javadoc) * @see com.t2.biofeedback.device.BioFeedbackDevice#onSetLinkTimeout(long) */ @Override protected void onSetLinkTimeout(long linkTimeout) { timout = linkTimeout; } /* (non-Javadoc) * @see com.t2.biofeedback.device.BioFeedbackDevice#onDeviceConnected() */ @Override protected void onDeviceConnected() { super.onDeviceConnected(); Handler handler = new Handler(); handler.postDelayed(new Runnable() { public void run() { Log.v(TAG, "Tell the device to start sending periodic data."); ZephyrMessage m = new ZephyrMessage( 0xA4, new byte[] { (byte) 0, (byte) 0, (byte) 0x10, // 10 sec (byte) 0x27, // (byte) 0x38, // 1 sec // (byte) 0x03, }, ZephyrMessage.ETX ); instance.write(m); Handler handler1 = new Handler(); handler1.postDelayed(new Runnable() { public void run() { // Tell the device to return periodic data. ZephyrMessage m = new ZephyrMessage( 0x14, new byte[] { 0x01 }, ZephyrMessage.ETX ); instance.write(m); } }, 500); } }, 500); // AsyncTask<Integer, Void, Void> asyncTask = new AsyncTask<Integer, Void, Void>() { // @Override // protected Void doInBackground(Integer... 
integers) { // return null; // @Override // protected void onPostExecute(Void aVoid) { // asyncTask.execute(0, 0); } /* (non-Javadoc) * @see com.t2.biofeedback.device.SerialBTDevice#onBeforeConnectionClosed() */ @Override protected void onBeforeConnectionClosed() { Log.v(TAG, "Tell the device to stop sending periodic data."); ZephyrMessage m = new ZephyrMessage( 0x14, new byte[] { 0x00 }, ZephyrMessage.ETX ); this.write(m); } /* (non-Javadoc) * @see com.t2.biofeedback.device.SerialBTDevice#onBytesReceived(byte[]) * * Parses the received bytes into a Zephyr message and forwards them */ @Override protected void onBytesReceived(byte[] bytes) { if (bytes[1]== 0x20) { this.onMessageReceived(ZephyrMessage.parse(bytes)); } if (bytes[1]== 0x14) { Log.v(TAG, " *********** Received response to start sending"); } if (bytes[1]== 0xa4) { Log.v(TAG, " *********** Received response to Timeout"); } if (bytes[1]== 0x23) { Log.v(TAG, "ZephyrHeartbeat "); } } /** * Receives parsed message from Zephyr device. Formats it to look like * a Spine message then sends it to the Spine server. 
* * @param msg Message to send to Spine server */ private void onMessageReceived(ZephyrMessage msg) { if(!msg.validPayload) { return; } // TODO: See if we want to send Zephyr heartbeat messages // we might want to send all messages, but for now, since there are a lot of heartbeat // messages for now we'll only send data messages if(msg.msgId == 0x20) { if (mDebug) Util.logHexByteString(TAG, msg.payload); // Use this to send the message directly to the main aplication //this.onDeviceMessage(msg.payload); // Use this to send the message directly to the main aplication // Use this to send the message via the normal SPINE mechanism // We need to build a SPINE-style message // SPINE HEADER // desc: | Vers:Ext:Type | GroupId | SourceId | DestId | Seq# | TotalFrag | Frag #| // size: | 2:1:5 | 8 | 16 | 16 | 8 | 8 | 8 | // value:| C4 | 0xAB | 0xfff1 | 0 | 0 | 1 | 1 | // SPINE MESSAGE // desc: | Func | Sensor | Feat| Feat | Feat | Bat Level | Heart Rate | Resp Rate | Skin Temp | Label | // desc: | Code | Code | Cnt | Code | Bitmask | Value | Value | Value | Value | Length | // size: | 8 | 8 | 8 | 8 | 8 | 32 | 32 | 32 | 32 | 8 | // value:| 9 | C | 4 | 9 | 0x0f | xxxxxxxx | xxxxxxxx | xxxxxxxx | xxxxxxxx | 0 | // Note: Pkt Type: 4 = data, Function code: 1 = Raw Data, Sensor code: C = Zephyr data final int ZEPHYR_FUNCT_CODE = 0x09; final int ZEPHER_FUNCT_TYPE = 1; //(Raw data) final int ZEPHYR_SENSOR_CODE = 0x0C; final int SPINE_HEADER_SIZE = 9; final int ZEPHYR_MSG_SIZE = 22; byte[] zepherMessage = new byte[ZEPHYR_MSG_SIZE + SPINE_HEADER_SIZE]; // First add spine header int i = 0; // Header zepherMessage[i++] = (byte) 0xc4; zepherMessage[i++] = (byte) 0xab; zepherMessage[i++] = (byte) 0xff; zepherMessage[i++] = (byte) 0xf1; zepherMessage[i++] = (byte) 0x00; zepherMessage[i++] = (byte) 0x00; zepherMessage[i++] = (byte) 0x00; zepherMessage[i++] = (byte) 0x01; zepherMessage[i++] = (byte) 0x01; // MEssage zepherMessage[i++] = ZEPHYR_FUNCT_CODE; zepherMessage[i++] = 
ZEPHYR_SENSOR_CODE; zepherMessage[i++] = 1; //1 feature zepherMessage[i++] = ZEPHER_FUNCT_TYPE; zepherMessage[i++] = 0x0f; // Bitmask zepherMessage[i++] = msg.raw[53]; zepherMessage[i++] = msg.raw[54]; zepherMessage[i++] = 0; zepherMessage[i++] = 0; zepherMessage[i++] = 0; zepherMessage[i++] = 0; zepherMessage[i++] = msg.payload[10]; zepherMessage[i++] = msg.payload[9]; zepherMessage[i++] = 0; zepherMessage[i++] = 0; zepherMessage[i++] = msg.payload[12]; zepherMessage[i++] = msg.payload[11]; zepherMessage[i++] = 0; zepherMessage[i++] = 0; zepherMessage[i++] = msg.payload[14]; zepherMessage[i++] = msg.payload[13]; zepherMessage[i++] = 0; // No label // this.onSpineMessage(zepherMessage); // Old method of sending data to server if (mServerListeners != null) { // if (mDebug) Log.i(TAG, "1"); for (i = mServerListeners.size()-1; i >= 0; i // if (mDebug) Log.i(TAG, "2"); try { Bundle b = new Bundle(); b.putByteArray("message", zepherMessage); Message msg1 = Message.obtain(null, MSG_SET_ARRAY_VALUE); msg1.setData(b); mServerListeners.get(i).send(msg1); } catch (RemoteException e) { // The client is dead. Remove it from the list; we are going through the list from back to front so this is safe to do inside the loop. mServerListeners.remove(i); } } } } } private void write(ZephyrMessage msg) { this.write(msg.getBytes()); } public static byte[] bitSetToByteArray(BitSet bs) { byte[] bytes = new byte[(int) Math.ceil(bs.size() / 8)]; for(int i = 0; i < bs.size(); i++) { if(bs.get(i) == true) { bytes[i / 8] |= 1 << i; } } return bytes; } }
package com.tobykurien.google_news;

import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.Bundle;
import android.util.Log;
import android.view.KeyEvent;
import android.view.View;
import android.view.View.OnLongClickListener;
import android.webkit.CookieSyncManager;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import android.widget.ProgressBar;
import android.widget.Toast;

/**
 * Activity that hosts Google mobile web properties inside a WebView.
 *
 * Navigation policy (see shouldOverrideUrlLoading): http/https links whose host
 * is not in {@link #googleSites} are handed to the system browser, mailto: links
 * open a send intent, market: links open the Play Store; everything else loads
 * in the embedded WebView. Back key walks the WebView history.
 */
public class GoogleNewsActivity extends Activity {
    WebView wv;

    // Host suffixes treated as "Google sites" and therefore kept in the WebView.
    String[] googleSites = new String[]{
            "google.com",
            "youtube.com",
            "google.co.za",
            "gmail.com"
    };

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        // Cookie syncing must be initialized before the WebView uses cookies.
        CookieSyncManager.createInstance(this);
    }

    /**
     * Return the title bar progress bar used to indicate page-load progress.
     *
     * @return the progress bar, or null if the layout does not provide one
     */
    public ProgressBar getProgressBar() {
        return (ProgressBar) findViewById(R.id.site_progress);
    }

    /**
     * Return the web view in which to display the site.
     *
     * @return the WebView, or null if the layout does not provide one
     */
    public WebView getWebView() {
        return (WebView) findViewById(R.id.site_webview);
    }

    /**
     * Return the site URL to load initially.
     */
    public String getSiteUrl() {
        return "https://mobile.google.com/";
    }

    @Override
    protected void onStart() {
        super.onStart();

        wv = getWebView();
        if (wv == null) {
            // Layout without a WebView — nothing to show.
            finish();
            return;
        }

        final ProgressBar pb = getProgressBar();
        if (pb != null) pb.setVisibility(View.VISIBLE);

        // NOTE(review): deprecated API; kept for compatibility with old platform levels.
        WebView.enablePlatformNotifications();

        WebSettings settings = wv.getSettings();
        settings.setJavaScriptEnabled(true);
        settings.setJavaScriptCanOpenWindowsAutomatically(false);

        // Enable local database.
        settings.setDatabaseEnabled(true);
        String databasePath = this.getApplicationContext()
                .getDir("database", Context.MODE_PRIVATE).getPath();
        settings.setDatabasePath(databasePath);

        // Enable manifest (application) cache and DOM storage.
        String cachePath = this.getApplicationContext()
                .getDir("cache", Context.MODE_PRIVATE).getPath();
        settings.setAppCachePath(cachePath);
        settings.setAllowFileAccess(true);
        settings.setAppCacheEnabled(true);
        settings.setDomStorageEnabled(true);
        settings.setAppCacheMaxSize(1024 * 1024 * 8); // 8 MB
        settings.setCacheMode(WebSettings.LOAD_DEFAULT);
        // wv.getSettings().setUserAgentString("android");

        wv.setWebViewClient(new WebViewClient() {
            @Override
            public void onPageFinished(WebView view, String url) {
                // Hide the spinner and flush cookies to persistent storage.
                if (pb != null) pb.setVisibility(View.GONE);
                CookieSyncManager.getInstance().sync();
                super.onPageFinished(view, url);
            }

            @Override
            public void onPageStarted(WebView view, String url, Bitmap favicon) {
                Log.d("Google", "loading " + url);
                if (pb != null) pb.setVisibility(View.VISIBLE);
                super.onPageStarted(view, url, favicon);
            }

            @Override
            public boolean shouldOverrideUrlLoading(WebView view, String url) {
                Uri uri = Uri.parse(url);
                Log.d("Google", "should override " + uri);

                if ((uri.getScheme().equals("http") || uri.getScheme().equals("https"))
                        && !isGoogleSite(uri)) {
                    // External web link: open in the default browser.
                    Intent i = new Intent(android.content.Intent.ACTION_VIEW);
                    i.setData(uri);
                    startActivity(i);
                    return true;
                } else if (uri.getScheme().equals("mailto")) {
                    // Hand mail links to an email client.
                    Intent i = new Intent(android.content.Intent.ACTION_SEND);
                    i.putExtra(android.content.Intent.EXTRA_EMAIL, url);
                    i.setType("text/html");
                    startActivity(i);
                    return true;
                } else if (uri.getScheme().equals("market")) {
                    // Play Store links must leave the WebView.
                    Intent i = new Intent(android.content.Intent.ACTION_VIEW);
                    i.setData(uri);
                    i.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                    startActivity(i);
                    return true;
                }

                return super.shouldOverrideUrlLoading(view, url);
            }

            @Override
            public void onReceivedError(WebView view, int errorCode,
                    String description, String failingUrl) {
                // Surface load errors to the user as a toast.
                super.onReceivedError(view, errorCode, description, failingUrl);
                Toast.makeText(GoogleNewsActivity.this, description, Toast.LENGTH_LONG).show();
            }
        });

        wv.addJavascriptInterface(new Object() {
            // attempt to override the _window function used by Google+ mobile app
            public void _window(String url) {
                throw new IllegalStateException(url); // to indicate success
            }
        }, "window");

        wv.setOnLongClickListener(new OnLongClickListener() {
            @Override
            public boolean onLongClick(View arg0) {
                // Long-press on a link: open that link externally.
                String url = wv.getHitTestResult().getExtra();
                if (url != null) {
                    Intent i = new Intent(android.content.Intent.ACTION_VIEW);
                    i.setData(Uri.parse(url));
                    i.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                    startActivity(i);
                    return true;
                }
                return false;
            }
        });

        wv.loadUrl(getSiteUrl());
    }

    /**
     * Returns true when the URI's host ends with one of the configured Google
     * host suffixes (case-insensitive).
     */
    private boolean isGoogleSite(Uri uri) {
        //String url = uri.toString();
        String host = uri.getHost();
        for (String site : googleSites) {
            if (host.toLowerCase().endsWith(site.toLowerCase())) {
                return true;
            }
        }
        return false;
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        // Back key navigates WebView history before falling through to the default.
        if ((keyCode == KeyEvent.KEYCODE_BACK) && wv.canGoBack()) {
            wv.goBack();
            return true;
        }
        return super.onKeyDown(keyCode, event);
    }
}
package org.nakedobjects.object.reflect;

import org.nakedobjects.NakedObjects;
import org.nakedobjects.object.Naked;
import org.nakedobjects.object.NakedObject;
import org.nakedobjects.object.NakedObjectRuntimeException;
import org.nakedobjects.object.NakedObjectSpecification;
import org.nakedobjects.object.NakedValue;
import org.nakedobjects.object.Persistable;
import org.nakedobjects.object.ResolveState;
import org.nakedobjects.object.control.Hint;
import org.nakedobjects.object.persistence.ConcurrencyException;
import org.nakedobjects.object.persistence.Oid;
import org.nakedobjects.utility.Assert;
import org.nakedobjects.utility.ToString;

import java.util.Date;

import org.apache.log4j.Logger;

/**
 * Adapter wrapping a plain Java object (POJO) so the NakedObjects framework can
 * treat it as a NakedObject: it tracks the object's OID, resolve state and
 * optimistic-lock version, and delegates field/action access to the object's
 * NakedObjectSpecification.
 *
 * Most accessors first call resolveIfOnlyAGhost(this) so a GHOST (unloaded)
 * object is fetched from the object manager before use.
 */
public class PojoAdapter implements NakedObject {
    private final static Logger LOG = Logger.getLogger(PojoAdapter.class);

    // Fallback title ("A <singular name>") computed lazily in getSpecification().
    private String defaultTitle;
    // Optimistic-lock metadata, set via setOptimisticLock().
    private Date modifiedTime;
    private String modifierBy;
    private Oid oid;
    // The wrapped domain object; nulled by dispose().
    private Object pojo;
    private transient ResolveState resolveState;
    private NakedObjectSpecification specification;
    private long version;

    public PojoAdapter(Object pojo) {
        this.pojo = pojo;
        resolveState = ResolveState.NEW;
    }

    /**
     * Throws ConcurrencyException if the caller's version no longer matches this
     * adapter's version (i.e. another user modified the object in the meantime).
     */
    public void checkLock(long version) {
        if (version != this.version) {
            throw new ConcurrencyException(modifierBy + " changed " + specification.getShortName() + " object ("
                    + titleString() + ") at " + modifiedTime + " (" + this.version + "~" + version + ")");
        }
    }

    public void clearAssociation(NakedObjectAssociation specification, NakedObject associate) {
        resolveIfOnlyAGhost(this);
        LOG.debug("clearAssociation " + specification.getName() + "/" + associate + " in " + this);
        specification.clearAssociation(this, associate);
    }

    public void clearCollection(OneToManyAssociation association) {
        resolveIfOnlyAGhost(this);
        LOG.debug("clearCollection " + association.getName() + " in " + this);
        association.clearCollection(this);
    }

    public void clearValue(OneToOneAssociation association) {
        resolveIfOnlyAGhost(this);
        LOG.debug("clearValue " + association.getName() + " in " + this);
        association.clearValue(this);
    }

    // Test/debug hook: forces the adapter back to GHOST without going through
    // changeState()'s validity assertion.
    public void debugClearResolved() {
        resolveState = ResolveState.GHOST;
    }

    /**
     * Asks the reflector to tell the pojo that this object has been deleted.
     */
    public void destroyed() {
        resolveIfOnlyAGhost(this);
        LOG.debug("deleted notification for " + this);
        specification.deleted(this);
    }

    /**
     * Dissociates the POJO from this adapter.
     */
    public void dispose() {
        pojo = null;
    }

    // Identity-based equality (inherited Object semantics); the pojo-comparing
    // variant below was deliberately disabled.
    public boolean equals(Object other) {
        return super.equals(other);
        /*
         * if (other == this) { return true; }
         *
         * if (other instanceof PojoAdapter) { // we don't delegate to equals(PojoAdapter) because
         * we // don't want to do the identity test again. PojoAdapter otherPojoAdapter =
         * (PojoAdapter) other; return otherPojoAdapter.pojo == pojo; //
         * otherPojoAdapter.pojo.equals(pojo); } return false;
         */}

    /*
     * Disabled overload, kept for reference: allowed the compiler to link directly
     * when the compile-time type is known (possible performance improvement —
     * called 166,000 times in a normal ref data fixture):
     *
     * public boolean equals(PojoAdapter otherPojoAdapter) {
     *     if (otherPojoAdapter == this) { return true; }
     *     return otherPojoAdapter.pojo == pojo; // otherPojoAdapter.pojo.equals(pojo);
     * }
     */

    /**
     * Executes the given action against this object, first resolving this object
     * and any NakedObject parameters that are still ghosts.
     */
    public Naked execute(Action action, Naked[] parameters) {
        resolveIfOnlyAGhost(this);
        LOG.debug("execute " + action.getName() + " in " + this);
        for (int i = 0; parameters != null && i < parameters.length; i++) {
            if (parameters[i] instanceof NakedObject) {
                resolveIfOnlyAGhost((NakedObject) parameters[i]);
            }
        }
        Naked result = action.execute(this, parameters);
        return result;
    }

    protected void finalize() throws Throwable {
        super.finalize();
        LOG.debug("finalizing pojo: " + pojo);
    }

    public NakedObject getAssociation(OneToOneAssociation field) {
        resolveIfOnlyAGhost(this);
        return (NakedObject) field.get(this);
    }

    public Naked getField(NakedObjectField field) {
        resolveIfOnlyAGhost(this);
        return field.get(this);
    }

    public NakedObjectField[] getFields() {
        return getSpecification().getFields();
    }

    public Hint getHint(Action action, Naked[] parameterValues) {
        resolveIfOnlyAGhost(this);
        return action.getHint(this, parameterValues);
    }

    // Dispatches on the concrete field type; any other NakedObjectField subtype
    // is unexpected and treated as a programming error.
    public Hint getHint(NakedObjectField field, Naked value) {
        resolveIfOnlyAGhost(this);
        if (field instanceof OneToOneAssociation) {
            return ((OneToOneAssociation) field).getHint(this, value);
        } else if (field instanceof OneToManyAssociation) {
            return ((OneToManyAssociation) field).getHint(this);
        } else {
            throw new NakedObjectRuntimeException();
        }
    }

    /**
     * Returns the short name from this objects NakedObjectSpecification
     *
     * TODO allow the reflector to set up a icon name
     */
    public String getIconName() {
        return null;
    }

    public String getLabel(Action action) {
        return action.getLabel(this);
    }

    public String getLabel(NakedObjectField field) {
        return field.getLabel(this);
    }

    public Object getObject() {
        return pojo;
    }

    public Oid getOid() {
        return oid;
    }

    public ActionParameterSet getParameters(Action action) {
        return action.getParameters(this);
    }

    // Lazily loads the specification from the pojo's class; also primes the
    // default title used when the object has no title of its own.
    public NakedObjectSpecification getSpecification() {
        if (specification == null) {
            specification = NakedObjects.getSpecificationLoader().loadSpecification(getObject().getClass());
            defaultTitle = "A " + specification.getSingularName().toLowerCase();
        }
        return specification;
    }

    public NakedValue getValue(OneToOneAssociation field) {
        resolveIfOnlyAGhost(this);
        return (NakedValue) field.get(this);
    }

    public long getVersion() {
        return version;
    }

    public NakedObjectField[] getVisibleFields() {
        return getSpecification().getVisibleFields(this);
    }

    // init* methods populate fields during object recreation, deliberately
    // without the resolve-if-ghost step used by the set* methods.
    public void initAssociation(NakedObjectAssociation field, NakedObject associatedObject) {
        LOG.debug("initAssociation " + field.getName() + "/" + associatedObject + " in " + this);
        field.initAssociation(this, associatedObject);
    }

    public void initOneToManyAssociation(OneToManyAssociation field, NakedObject[] instances) {
        LOG.debug("initAssociation " + field.getName() + " with " + instances.length + "instances in " + this);
        field.initOneToManyAssociation(this, instances);
    }

    public void initValue(OneToOneAssociation field, Object object) {
        LOG.debug("initValue " + field.getName() + " with " + object + " in " + this);
        field.initValue(this, object);
    }

    public boolean isEmpty(NakedObjectField field) {
        resolveIfOnlyAGhost(this);
        return field.isEmpty(this);
    }

    // If the given object is still a GHOST, force the object manager to load it
    // now so the caller can safely read its state.
    private void resolveIfOnlyAGhost(NakedObject object) {
        ResolveState resolveState = object.getResolveState();
        if (resolveState.isGhost()) {
            LOG.info("Unresolved object attempting to be used; resolving it immediately: " + object);
            NakedObjects.getObjectManager().resolveImmediately(object);
        }
    }

    public Persistable persistable() {
        return getSpecification().persistable();
    }

    // Called when a transient object is made persistent: records the assigned OID
    // and marks the adapter fully RESOLVED.
    public void persistedAs(Oid oid) {
        LOG.debug("set OID " + oid + " " + this);
        Assert.assertTrue("Cannot make a non-transient object persistent", this, getResolveState().isTransient());
        Assert.assertTrue("Oid can't be set again", this, getOid() == null);
        this.oid = oid;
        resolveState = ResolveState.RESOLVED;
    }

    public void setAssociation(NakedObjectAssociation field, NakedObject associatedObject) {
        resolveIfOnlyAGhost(this);
        LOG.debug("setAssociation " + field.getName() + " with " + associatedObject + " in " + this);
        field.setAssociation(this, associatedObject);
    }

    // Records who changed the object and when, for checkLock()'s concurrency test.
    public void setOptimisticLock(long version, String modifierBy, Date modifiedTime) {
        this.version = version;
        this.modifierBy = modifierBy;
        this.modifiedTime = modifiedTime;
    }

    public void setValue(OneToOneAssociation field, Object object) {
        resolveIfOnlyAGhost(this);
        LOG.debug("setValue " + field.getName() + " with " + object + " in " + this);
        field.setValue(this, object);
    }

    /**
     * Returns the title from the underlying business object. If the object has not yet been
     * resolved the specification will be asked for a unresolved title, which could of been
     * persisted by the persistence mechanism. If either of the above provides null as the title
     * then this method will return a title relating to the name of the object type, e.g. "A
     * Customer", "A Product".
     */
    public String titleString() {
        NakedObjectSpecification specification = getSpecification();
        //String title = specification.getTitle().title(this);
        String title = specification.getTitle(this);
        if (title == null) {
            title = defaultTitle;
        }
        return title;
    }

    public synchronized String toString() {
        ToString str = new ToString(this);
        str.append(resolveState.code());
        Oid oid = getOid();
        if (oid != null) {
            str.append(":");
            str.append(oid.toString());
        } else {
            str.append(":-");
        }
        str.setAddComma();
        str.append("specification", specification == null ? "undetermined" : specification.getShortName());
        if (resolveState.isTransient() || resolveState.isResolved()) {
            // don't do title of unresolved objects as this may force the resolving of the object.
            str.append("title", titleString());
        }
        str.appendAsHex("pojo-hash", pojo.hashCode());
        str.appendAsHex("version", version);
        str.appendAsTimestamp("modified", modifiedTime);
        return str.toString();
    }

    // Called when an object is recreated from the persistence layer: reverts to
    // GHOST (unloaded) with the given OID.
    public void recreatedAs(Oid oid) {
        changeState(ResolveState.GHOST);
        this.oid = oid;
    }

    // State machine transition; asserts the transition is legal before applying it.
    public void changeState(ResolveState newState) {
        Assert.assertTrue("can't change from " + resolveState.name() + " to " + newState.name() + ": " + this, resolveState
                .isValidToChangeTo(newState));
        LOG.debug("recreate - change state " + this + " to " + newState);
        resolveState = newState;
    }

    public ResolveState getResolveState() {
        return resolveState;
    }
}
package org.mondo.collaboration.security.lens.bx.online; import java.io.File; import java.lang.reflect.InvocationTargetException; import java.util.ConcurrentModificationException; import java.util.EnumMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import org.eclipse.emf.common.util.URI; import org.eclipse.emf.ecore.resource.Resource; import org.eclipse.emf.ecore.resource.ResourceSet; import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl; import org.eclipse.incquery.runtime.base.api.BaseIndexOptions; import org.eclipse.incquery.runtime.emf.EMFScope; import org.eclipse.incquery.runtime.exception.IncQueryException; import org.eclipse.viatra.modelobfuscator.api.DataTypeObfuscator; import org.mondo.collaboration.security.lens.arbiter.SecurityArbiter; import org.mondo.collaboration.security.lens.bx.AbortReason.DenialReason; import org.mondo.collaboration.security.lens.bx.LensTransformationExecution; import org.mondo.collaboration.security.lens.bx.RelationalLensXform; import org.mondo.collaboration.security.lens.context.MondoLensScope; import org.mondo.collaboration.security.lens.context.keys.CorrespondenceKey; import org.mondo.collaboration.security.lens.correspondence.EObjectCorrespondence; import org.mondo.collaboration.security.lens.correspondence.EObjectCorrespondence.UniqueIDSchemeFactory; import org.mondo.collaboration.security.lens.emf.ModelIndexer; import org.mondo.collaboration.security.lens.util.LiveTable; import org.mondo.collaboration.security.macl.xtext.mondoAccessControlLanguage.AccessControlModel; import org.mondo.collaboration.security.macl.xtext.rule.mACLRule.User; import com.google.common.collect.ImmutableSet; /** * An online synchronization session for a variable number of users ('legs'), between the single gold model and a front model for each leg. 
* @author Bergmann Gabor * */ // TODO: use Transactional EMF public class OnlineCollaborationSession { public static final URI FAKE_MAIN_RESOURCE_URI = URI.createURI("file: "org.mondo.collaboration.security.lens.bx.fake-path" + File.separator + "org.mondo.collaboration.security.lens.bx.fake-root-resource"); private final URI goldConfinementURI; private final ResourceSet goldResourceSet; private final UniqueIDSchemeFactory uniqueIDFactory; private final Resource policyResource; private final SecurityArbiter arbiter; private final ModelIndexer goldIndexer; private final AccessControlModel accessControlModel; private final Set<Leg> legs = new HashSet<>(); /** * For serializing concurrent modifications by Legs */ Lock mutex = new ReentrantLock(); /** * @param goldConfinementURI the writable area in the folder hierarchy for the gold model * @param goldResourceSet the gold model * @param uniqueIDFactory the scheme for identifying model elements * @param policyResource the resource of the policy model * @throws IncQueryException */ public OnlineCollaborationSession(URI goldConfinementURI, ResourceSet goldResourceSet, UniqueIDSchemeFactory uniqueIDFactory, Resource policyResource) throws IncQueryException { super(); this.goldConfinementURI = goldConfinementURI; this.goldResourceSet = goldResourceSet; this.uniqueIDFactory = uniqueIDFactory; this.policyResource = policyResource; accessControlModel = (AccessControlModel) policyResource.getContents().get(0); arbiter = new SecurityArbiter( accessControlModel.getPolicy(), null /*user*/, ImmutableSet.of(goldResourceSet), new BaseIndexOptions()); goldIndexer = new ModelIndexer( goldConfinementURI, goldResourceSet, EMFScope.extractUnderlyingEMFIndex(arbiter.getPolicyQueryEngine())); } public class Leg { private final String userName; private DataTypeObfuscator<String> stringObfuscator; private final URI frontConfinementURI; private final ResourceSet frontResourceSet; private MondoLensScope scope; private final RelationalLensXform 
lens; /** * Creates an in-memory front model for the user and immediately synchronizes the gold model onto it. * @param userName the name of the user for which the online synchronization is conducted * @param stringObfuscator the attribute obfuscator seeded for the specific user * @throws InvocationTargetException */ public Leg(String userName, DataTypeObfuscator<String> stringObfuscator) throws InvocationTargetException { this(userName, stringObfuscator, true, new ResourceSetImpl(), FAKE_MAIN_RESOURCE_URI); } /** * Creates a * <p> Use {@link #OnlineCollaborationSession(String, DataTypeObfuscator)} instead * unless you want to fine-tune the front model. * * @param userName the name of the user for which the online synchronization is conducted * @param stringObfuscator the attribute obfuscator seeded for the specific user * @param startWithGet whether to immediately initialize model contents by a GET (overwrites existing content) * @param frontConfinementURI the writable area in the folder hierarchy for the front model * @param frontResourceSet preinitialized front model * @throws InvocationTargetException */ public Leg(String userName, DataTypeObfuscator<String> stringObfuscator, boolean startWithGet, ResourceSet frontResourceSet, URI frontConfinementURI) throws InvocationTargetException { super(); this.userName = userName; this.stringObfuscator = stringObfuscator; this.frontConfinementURI = frontConfinementURI; this.frontResourceSet = frontResourceSet; legs.add(this); this.lens = setupLens(startWithGet); if (startWithGet) { overWriteFromGold(); } } /** * Sets up the bidirectional lens transformation between the front model of the Leg and the common gold model. * <p> Must read the front model. For Transactional EMF or other model-level R/W access control, * subclass and override to wrap in a read-enabled transaction. 
*/ protected RelationalLensXform setupLens(boolean startWithGet) { User user = SecurityArbiter.getUserByName(accessControlModel, userName); if (user == null) throw new IllegalArgumentException(String.format("User of name %s not found in MACL resource %s", userName, policyResource.getURI())); ModelIndexer frontIndexer = new ModelIndexer( frontConfinementURI, frontResourceSet); // if using in-memory resource with fake URI, then front model is initially empty, no need to gather EObject correspondences LiveTable correspondenceTable = startWithGet ? new LiveTable() : EObjectCorrespondence.buildEObjectCorrespondenceTable( goldIndexer, uniqueIDFactory.apply(goldConfinementURI), frontIndexer, uniqueIDFactory.apply(frontConfinementURI) ); Map<CorrespondenceKey, LiveTable> correspondenceTables = new EnumMap<CorrespondenceKey, LiveTable>(CorrespondenceKey.class); correspondenceTables.put(CorrespondenceKey.EOBJECT, correspondenceTable); scope = new MondoLensScope(arbiter, goldIndexer, frontIndexer, correspondenceTables); return new RelationalLensXform(scope, user, stringObfuscator); } /** * Uses the GET transformation of the lens to update the front model so that it reflects the contents of the gold model. * <p> May write to the front model. For Transactional EMF or other model-level R/W access control, * subclass and override to wrap in a write-enabled transaction. */ public void overWriteFromGold() { LensTransformationExecution propagatingExecution = lens.doGet(); // propagation error if (propagatingExecution.isAborted()) { // this must be an exception abort, because GET, so we just rethrow the stored runtime exception propagatingExecution.extractDenialReason(); // should not reach this throw new IllegalStateException(); } } /** * Use this method to indicate user modifications of the front model. * <p> If client can wrap all user modifications to this front model into a single callback, * then serializability is automatically enforced. 
* * @param modificationTransaction a callback that encloses the actual modifications to the front model. * Can be null if it is not possible to enclose modifications; * serializability must be ensured separately in that case. * @throws InvocationTargetException if the modification transaction throws an exception */ public DenialReason atomicallyModify(Callable<?> modificationTransaction) throws InvocationTargetException { mutex.lock(); try { try { if (modificationTransaction == null) modificationTransaction.call(); } catch (Exception e) { // // try to roll back // overWriteFromGold(); throw new InvocationTargetException(e); } final LensTransformationExecution lensExecution = propagateToGold(); return lensExecution.extractDenialReason(); } finally { mutex.unlock(); } } /** * Assumption: called within a write-enabled transaction for this Leg. * If the modification is denied, the transaction must be rolled back by the client. * * @return the reason the modification was denied, or null if it was successful * @throws ConcurrentModificationException if another Leg thread has preempted this modification */ public DenialReason trySubmitModification() { if (!mutex.tryLock()) { // another Leg has acquired the mutex and is performing modifications throw new ConcurrentModificationException(); } // acquired try { // synchronized(this)? 
final LensTransformationExecution lensExecution = propagateToGold(); return lensExecution.extractDenialReason(); } finally { mutex.unlock(); } } protected LensTransformationExecution propagateToGold() { final LensTransformationExecution lensExecution = lens.doPutback(true ); // propagate successful PUTBACK to the other front models if (!lensExecution.isAborted()) { for (Leg leg : legs) { leg.overWriteFromGold(); } } return lensExecution; } public String getUserName() { return userName; } public ResourceSet getFrontResourceSet() { return frontResourceSet; } public MondoLensScope getScope() { return scope; } public void dispose() { legs.remove(userName); } } }
package z21Drive.actions; import z21Drive.LocoAddressOutOfRangeException; public class Z21ActionLanXCVPomWriteByte extends Z21Action{ /** * Writing a CV on the MainTrack * * @param cv The CV to read. * @param value Value * @param locoAddress the Adress of the Loco * @throws LocoAddressOutOfRangeException Thrown if loco address is out of supported range. */ public Z21ActionLanXCVPomWriteByte(int locoAddress, int cv, int value) throws LocoAddressOutOfRangeException{ byteRepresentation.add(Byte.decode("0x40")); byteRepresentation.add(Byte.decode("0x00")); if (locoAddress < 1 || locoAddress > 63) throw new LocoAddressOutOfRangeException(locoAddress); addDataToByteRepresentation(new Object[]{ locoAddress, cv, value}); addLenByte(); } @Override public void addDataToByteRepresentation(Object[] objs) { //Add all the data byteRepresentation.add((byte) 0xE6); // X-Header byteRepresentation.add(Byte.decode("0x30")); // DB 0 // Adding Loco-Addr byte Adr_MSB; byte Adr_LSB; String binary = String.format("%16s", Integer.toBinaryString((Integer) objs[0])).replace(' ', '0'); String binaryMSB = binary.substring(0, 8); String binaryLSB = binary.substring(8); if (binary.replaceFirst ("^0*", "").toCharArray().length <= 8) Adr_MSB = 0; else Adr_MSB = (byte) Integer.parseInt(binaryMSB, 2); Adr_LSB = (byte) Integer.parseInt(binaryLSB, 2); byteRepresentation.add((byte) (Adr_MSB)); // DB 1 byteRepresentation.add(Adr_LSB); // DB 2 // Adding CV int cv = (int) objs[1] - 1; // 0 => CV1, ... int value = (int) objs[2]; byteRepresentation.add((byte) (0xEC | cv >> 8 )); // DB3 byteRepresentation.add((byte) (cv & 0xFF)); // DB4 byteRepresentation.add((byte) (value & 0xFF)); // DB5 byteRepresentation.add((byte) (byteRepresentation.get(2) & 0xff ^ byteRepresentation.get(3) & 0xff^ byteRepresentation.get(4) & 0xff^ byteRepresentation.get(5) & 0xff^ byteRepresentation.get(6) & 0xff^ byteRepresentation.get(7) & 0xff^ byteRepresentation.get(8) & 0xff)); } }
package com.opengamma.analytics.financial.model.volatility.surface;

import static com.opengamma.analytics.math.FunctionUtils.square;

import java.util.Arrays;

import org.apache.commons.lang.ObjectUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.opengamma.analytics.financial.model.volatility.smile.fitting.interpolation.GeneralSmileInterpolator;
import com.opengamma.analytics.financial.model.volatility.smile.fitting.interpolation.SmileInterpolatorSABR;
import com.opengamma.analytics.financial.model.volatility.smile.fitting.interpolation.SurfaceArrayUtils;
import com.opengamma.analytics.financial.model.volatility.smile.fitting.sabr.SmileSurfaceDataBundle;
import com.opengamma.analytics.math.function.Function;
import com.opengamma.analytics.math.function.Function1D;
import com.opengamma.analytics.math.interpolation.CombinedInterpolatorExtrapolatorFactory;
import com.opengamma.analytics.math.interpolation.Interpolator1D;
import com.opengamma.analytics.math.interpolation.Interpolator1DFactory;
import com.opengamma.analytics.math.interpolation.data.Interpolator1DDataBundle;
import com.opengamma.analytics.math.surface.FunctionalDoublesSurface;
import com.opengamma.analytics.util.serialization.InvokedSerializedForm;
import com.opengamma.util.ArgumentChecker;

/**
 * Builds an implied-volatility surface from market smile data by fitting each expiry's smile
 * independently (via a {@link GeneralSmileInterpolator}) and then interpolating between expiries
 * in the time direction, optionally in log-time, log-variance and/or integrated-variance space.
 */
public class VolatilitySurfaceInterpolator {
  private static final Logger LOGGER = LoggerFactory.getLogger(VolatilitySurfaceInterpolator.class);
  /** Default smile fitter: SABR-based interpolation. */
  private static final GeneralSmileInterpolator DEFAULT_SMILE_INTERPOLATOR = new SmileInterpolatorSABR();
  /** Default time interpolator: natural cubic spline with linear extrapolation. */
  private static final Interpolator1D DEFAULT_TIME_INTERPOLATOR = CombinedInterpolatorExtrapolatorFactory.getInterpolator(Interpolator1DFactory.NATURAL_CUBIC_SPLINE,
      Interpolator1DFactory.LINEAR_EXTRAPOLATOR);
  private static final boolean USE_LOG_TIME = true;
  private static final boolean USE_INTEGRATED_VARIANCE = true;
  private static final boolean USE_LOG_VALUE = true;

  /** Fits a single-expiry smile to the market strikes/vols. */
  private final GeneralSmileInterpolator _smileInterpolator;
  /** Interpolates (integrated) variance across expiries. */
  private final Interpolator1D _timeInterpolator;
  /** If true, interpolate against log(expiry) rather than expiry. */
  private final boolean _useLogTime;
  /** If true, interpolate log of the (integrated) variance. */
  private final boolean _useLogVar;
  /** If true, interpolate integrated variance (sigma^2 * t) rather than variance. */
  private final boolean _useIntegratedVariance;

  /** Creates an interpolator with all default settings (SABR smiles, cubic-spline log-time, log integrated variance). */
  public VolatilitySurfaceInterpolator() {
    this(DEFAULT_SMILE_INTERPOLATOR, DEFAULT_TIME_INTERPOLATOR, USE_LOG_TIME, USE_INTEGRATED_VARIANCE, USE_LOG_VALUE);
  }

  /**
   * Creates an interpolator with a custom smile fitter and default time settings.
   * @param smileInterpolator the smile fitter, not null
   */
  public VolatilitySurfaceInterpolator(final GeneralSmileInterpolator smileInterpolator) {
    this(smileInterpolator, DEFAULT_TIME_INTERPOLATOR, USE_LOG_TIME, USE_INTEGRATED_VARIANCE, USE_LOG_VALUE);
  }

  /**
   * Creates an interpolator with a custom time interpolator and default smile fitter.
   * @param timeInterpolator the expiry-direction interpolator, not null
   */
  public VolatilitySurfaceInterpolator(final Interpolator1D timeInterpolator) {
    this(DEFAULT_SMILE_INTERPOLATOR, timeInterpolator, USE_LOG_TIME, USE_INTEGRATED_VARIANCE, USE_LOG_VALUE);
  }

  /**
   * Creates an interpolator with custom smile fitter and time interpolator, default flags.
   * @param smileInterpolator the smile fitter, not null
   * @param timeInterpolator the expiry-direction interpolator, not null
   */
  public VolatilitySurfaceInterpolator(final GeneralSmileInterpolator smileInterpolator, final Interpolator1D timeInterpolator) {
    this(smileInterpolator, timeInterpolator, USE_LOG_TIME, USE_INTEGRATED_VARIANCE, USE_LOG_VALUE);
  }

  /**
   * <b>Note</b> The combination of useIntegratedVariance = true, useLogTime != useLogValue can produce very bad results, including considerable dips/humps between
   * points at the same level (all other combinations give a flat line), and thus should be avoided.
   * @param useIntegratedVariance if true integrated variance ($\sigma^2t$) is used in the interpolation, otherwise variance is used
   * @param useLogTime if true the natural-log of the time values are used in interpolation, if false the time values are used directly. This can be useful if the
   * expiries vary greatly in magnitude
   * @param useLogVariance If true the log of variance (actually either variance or integrated variance) is used in the interpolation
   */
  public VolatilitySurfaceInterpolator(final boolean useIntegratedVariance, final boolean useLogTime, final boolean useLogVariance) {
    _smileInterpolator = DEFAULT_SMILE_INTERPOLATOR;
    _timeInterpolator = DEFAULT_TIME_INTERPOLATOR;
    _useLogTime = useLogTime;
    _useIntegratedVariance = useIntegratedVariance;
    _useLogVar = useLogVariance;
    // Warn on the known-bad flag combination (see class note above).
    if (_useIntegratedVariance && _useLogVar != _useLogTime) {
      LOGGER.warn("The combination of useIntegratedVariance = true, useLogTime != useLogValue can produce very bad results, including considerable dips between " +
          "points at the same level (all other combinations give a flat line), and thus should be avoided.");
    }
  }

  /**
   * Creates an interpolator with a custom smile fitter, default time interpolator and explicit flags.
   * @param smileInterpolator the smile fitter, not null
   * @param useLogTime if true interpolate against log(expiry)
   * @param useIntegratedVariance if true interpolate integrated variance
   * @param useLogValue if true interpolate the log of the y-value
   */
  public VolatilitySurfaceInterpolator(final GeneralSmileInterpolator smileInterpolator, final boolean useLogTime, final boolean useIntegratedVariance,
      final boolean useLogValue) {
    this(smileInterpolator, DEFAULT_TIME_INTERPOLATOR, useLogTime, useIntegratedVariance, useLogValue);
  }

  /**
   * Creates an interpolator with a custom time interpolator, default smile fitter and explicit flags.
   * @param timeInterpolator the expiry-direction interpolator, not null
   * @param useLogTime if true interpolate against log(expiry)
   * @param useIntegratedVariance if true interpolate integrated variance
   * @param useLogValue if true interpolate the log of the y-value
   */
  public VolatilitySurfaceInterpolator(final Interpolator1D timeInterpolator, final boolean useLogTime, final boolean useIntegratedVariance,
      final boolean useLogValue) {
    this(DEFAULT_SMILE_INTERPOLATOR, timeInterpolator, useLogTime, useIntegratedVariance, useLogValue);
  }

  /**
   * Fully-specified constructor.
   * @param smileInterpolator the smile fitter, not null
   * @param timeInterpolator the expiry-direction interpolator, not null
   * @param useLogTime if true interpolate against log(expiry)
   * @param useIntegratedVariance if true interpolate integrated variance
   * @param useLogValue if true interpolate the log of the y-value
   */
  public VolatilitySurfaceInterpolator(final GeneralSmileInterpolator smileInterpolator, final Interpolator1D timeInterpolator, final boolean useLogTime,
      final boolean useIntegratedVariance, final boolean useLogValue) {
    ArgumentChecker.notNull(smileInterpolator, "null smile interpolator");
    ArgumentChecker.notNull(timeInterpolator, "null time interpolator");
    _smileInterpolator = smileInterpolator;
    _timeInterpolator = timeInterpolator;
    _useLogTime = useLogTime;
    _useIntegratedVariance = useIntegratedVariance;
    _useLogVar = useLogValue;
    if (_useIntegratedVariance && _useLogVar != _useLogTime) {
      LOGGER.warn("The combination of useIntegratedVariance = true, useLogTime != useLogValue can produce very bad results, including considerable dips between " +
          "points at the same level (all other combinations give a flat line), and thus should be avoided.");
    }
  }

  //TODO add new constructor pattern using builder to set options, as in EquityVarianceSwapPricer

  /**
   * Fits each expiry's smile independently using the configured smile interpolator.
   * @param marketData the market data (forwards, expiries, strikes, vols), not null
   * @return one volatility-of-strike function per expiry
   */
  public Function1D<Double, Double>[] getIndependentSmileFits(final SmileSurfaceDataBundle marketData) {
    ArgumentChecker.notNull(marketData, "market data");
    final int n = marketData.getNumExpiries();
    final double[] forwards = marketData.getForwards();
    final double[][] strikes = marketData.getStrikes();
    final double[] expiries = marketData.getExpiries();
    final double[][] vols = marketData.getVolatilities();
    //fit each smile independently
    @SuppressWarnings("unchecked")
    final Function1D<Double, Double>[] smileFunctions = new Function1D[n];
    for (int i = 0; i < n; i++) {
      smileFunctions[i] = _smileInterpolator.getVolatilityFunction(forwards[i], strikes[i], expiries[i], vols[i]);
    }
    return smileFunctions;
  }

  /**
   * For a given expiry and strike, perform an interpolation between either the variance (square of volatility) or integrated variances
   * of points with the same proxy delta (defined as d = Math.log(forward / k) / Math.sqrt(t)) on the fitted smiles.<p>
   * Each smile is fitted independently using the supplied GeneralSmileInterpolator (the default is SmileInterpolatorSABR), which produces a curve (the smile) that
   * fits all the market implied volatilities and has sensible extrapolation behaviour. <p>
   * The interpolation in the time direction uses the supplied interpolator (default is natural cubic spline) using the four nearest points. There is no guarantees of a
   * monotonically increasing integrated variance (hence calendar arbitrage or negative local volatility are possible), but using log time to better space out the x-points helps.
   * @param marketData The market data - contains the forwards, expiries, and strikes and (market) implied volatilities at each expiry, not null
   * @return Implied volatility surface parameterised by time and moneyness
   */
  public BlackVolatilitySurfaceMoneynessFcnBackedByGrid getVolatilitySurface(final SmileSurfaceDataBundle marketData) {
    ArgumentChecker.notNull(marketData, "market data");
    final Function1D<Double, Double>[] smileFunctions = getIndependentSmileFits(marketData);
    return combineIndependentSmileFits(smileFunctions, marketData);
  }

  /**
   * Given a set of smiles in the moneyness dimension, produce surface function that additionally interpolates in expiry. <p>
   * Access to the individual parts of getVolatilitySurface() permits user to bump vols without having to recalibrate each independent smile
   * @param smileFunctions Array of Function1D's, one per expiry, that return volatility given strike
   * @param marketData The market data - contains the forwards, expiries, and strikes and (market) implied volatilities at each expiry, not null
   * @return Implied volatility surface parameterised by time and moneyness
   */
  public BlackVolatilitySurfaceMoneynessFcnBackedByGrid combineIndependentSmileFits(final Function1D<Double, Double>[] smileFunctions,
      final SmileSurfaceDataBundle marketData) {
    ArgumentChecker.notNull(marketData, "market data");
    ArgumentChecker.isTrue(marketData.getNumExpiries() > 0, "Do not have market data for any expiry");
    final int n = marketData.getNumExpiries();
    final double[] forwards = marketData.getForwards();
    final double[] expiries = marketData.getExpiries();
    // x-axis of the time interpolation: either log(expiry) or expiry itself.
    double[] temp = null;
    if (_useLogTime) {
      temp = new double[n];
      for (int i = 0; i < n; i++) {
        temp[i] = Math.log(expiries[i]);
      }
    } else {
      temp = expiries;
    }
    final double[] xValues = temp;

    final Function<Double, Double> surFunc = new Function<Double, Double>() {
      @SuppressWarnings("synthetic-access")
      @Override
      public Double evaluate(final Double... tm) {
        final double t = tm[0];
        final double m = tm[1];

        // Case 1: Only a single expiry is available
        if (n == 1) {
          return smileFunctions[0].evaluate(forwards[0] * m);
        }

        // Case 2 & 3: Extrapolation OR Less than 4 Expiries => Linear Extrapolation / Interpolation
        // FIXME Casey 15-01-2015 Extrapolation is hardcoded, to Linear. Should take input from _timeInterpolator
        // FIXME If n < 4, time interpolation is hardcoded, also to be linear.
        final int index = SurfaceArrayUtils.getLowerBoundIndex(expiries, t);
        if (index == 0 || index == (n - 1) || n < 4) {
          // Pick the bracketing pair (clamped to the data range for extrapolation).
          int lowIdx;
          if (index == 0) {
            lowIdx = 0;
          } else if (index == n - 1) {
            lowIdx = n - 2;
          } else {
            lowIdx = index;
          }
          final double x = _useLogTime ? Math.log(t) : t;
          // Strikes at constant proxy delta: k_i = F_i * m^sqrt(T_i / t).
          final double k0 = forwards[lowIdx] * Math.pow(m, Math.sqrt(expiries[lowIdx] / t));
          final double k1 = forwards[lowIdx + 1] * Math.pow(m, Math.sqrt(expiries[lowIdx + 1] / t));
          double var0 = square(smileFunctions[lowIdx].evaluate(k0));
          double var1 = square(smileFunctions[lowIdx + 1].evaluate(k1));
          if (_useIntegratedVariance) {
            var0 *= expiries[lowIdx];
            var1 *= expiries[lowIdx + 1];
          }
          if (_useLogVar) {
            var0 = Math.log(var0);
            var1 = Math.log(var1);
          }
          // Linear interpolation/extrapolation in x between the two bracketing values.
          final double dt = xValues[lowIdx + 1] - xValues[lowIdx];
          double var = ((xValues[lowIdx + 1] - x) * var0 + (x - xValues[lowIdx]) * var1) / dt;
          if (_useLogVar) {
            var = Math.exp(var);
            // NOTE(review): Math.exp is always positive, so this fallback branch appears
            // unreachable when _useLogVar is true - confirm intent.
            if (var < 0.0) {
              var0 = Math.exp(var0);
              var1 = Math.exp(var1);
            }
          }
          // Guard against negative extrapolated variance: fall back to the smaller endpoint.
          if (var >= 0.0) {
            return Math.sqrt(var / (_useIntegratedVariance ? t : 1.0));
          } else {
            return Math.sqrt(Math.min(var0, var1) / (_useIntegratedVariance ? t : 1.0));
          }
        }

        // Case 4: Interpolation when n >= 4
        //FIXME Time interpolator hard-coded to be a natural cubic spline when n > 3
        // Choose a 4-point window around the target expiry, clamped to the data range.
        int lower;
        if (index == 0) {
          lower = 0;
        } else if (index == n - 2) {
          lower = index - 2;
        } else if (index == n - 1) {
          lower = index - 3;
        } else {
          lower = index - 1;
        }
        final double[] xData = Arrays.copyOfRange(xValues, lower, lower + 4);
        final double x = _useLogTime ? Math.log(t) : t;
        final double[] yData = new double[4];
        for (int i = 0; i < 4; i++) {
          final double time = expiries[lower + i];
          // Strike at constant proxy delta for this expiry.
          final double k = forwards[lower + i] * Math.pow(m, Math.sqrt(time / t));
          double y = square(smileFunctions[lower + i].evaluate(k));
          if (_useIntegratedVariance) {
            y *= time;
          }
          yData[i] = _useLogVar ? Math.log(y) : y;
        }
        final Interpolator1DDataBundle db = _timeInterpolator.getDataBundle(xData, yData);
        final double tRes = _timeInterpolator.interpolate(db, x);
        final double yValue = _useLogVar ? Math.exp(tRes) : tRes;
        final double res = Math.sqrt(yValue / (_useIntegratedVariance ? t : 1.0));
        return res;
      }

      // Serialization hook: rebuild the surface from the interpolator and market data.
      public Object writeReplace() {
        return new InvokedSerializedForm(VolatilitySurfaceInterpolator.this, "getVolatilitySurface", marketData);
      }
    };
    return new BlackVolatilitySurfaceMoneynessFcnBackedByGrid(FunctionalDoublesSurface.from(surFunc), marketData.getForwardCurve(), marketData,
        VolatilitySurfaceInterpolator.this);
  }

  //TODO find a way of bumping a single point without recalibrating all unaffected smiles
  /**
   * Builds the surface from market data with a single point bumped. Note: all smiles are refitted.
   * @param marketData the market data, not null
   * @param expiryIndex index of the expiry to bump
   * @param strikeIndex index of the strike to bump
   * @param amount absolute bump size
   * @return the bumped implied volatility surface
   */
  public BlackVolatilitySurfaceMoneynessFcnBackedByGrid getBumpedVolatilitySurface(final SmileSurfaceDataBundle marketData, final int expiryIndex,
      final int strikeIndex, final double amount) {
    ArgumentChecker.notNull(marketData, "marketData");
    final SmileSurfaceDataBundle bumpedData = marketData.withBumpedPoint(expiryIndex, strikeIndex, amount);
    return getVolatilitySurface(bumpedData);
  }

  /** @return true if interpolation is against log(expiry) */
  public boolean useLogTime() {
    return _useLogTime;
  }

  /** @return true if integrated variance is interpolated */
  public boolean useIntegratedVariance() {
    return _useIntegratedVariance;
  }

  /** @return true if the log of the y-value is interpolated */
  public boolean useLogValue() {
    return _useLogVar;
  }

  /** @return the expiry-direction interpolator */
  public Interpolator1D getTimeInterpolator() {
    return _timeInterpolator;
  }

  /** @return the smile fitter */
  public GeneralSmileInterpolator getSmileInterpolator() {
    return _smileInterpolator;
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + _smileInterpolator.hashCode();
    result = prime * result + _timeInterpolator.hashCode();
    result = prime * result + (_useIntegratedVariance ? 1231 : 1237);
    result = prime * result + (_useLogTime ? 1231 : 1237);
    result = prime * result + (_useLogVar ? 1231 : 1237);
    return result;
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    final VolatilitySurfaceInterpolator other = (VolatilitySurfaceInterpolator) obj;
    if (!ObjectUtils.equals(_smileInterpolator, other._smileInterpolator)) {
      return false;
    }
    if (!ObjectUtils.equals(_timeInterpolator, other._timeInterpolator)) {
      return false;
    }
    if (_useIntegratedVariance != other._useIntegratedVariance) {
      return false;
    }
    if (_useLogTime != other._useLogTime) {
      return false;
    }
    if (_useLogVar != other._useLogVar) {
      return false;
    }
    return true;
  }

  @Override
  public String toString() {
    final StringBuilder sb = new StringBuilder("VolatilitySurfaceInterpolator[time interpolator=");
    sb.append(_timeInterpolator.toString());
    sb.append(", smile interpolator=");
    sb.append(_smileInterpolator.toString());
    sb.append(" using ");
    sb.append(_useIntegratedVariance ? "integrated variance, " : " variance, ");
    sb.append(_useLogTime ? " log time and " : " linear time and ");
    sb.append(_useLogVar ? " log y" : " linear y");
    sb.append("]");
    return sb.toString();
  }
}
package com.opengamma.financial.analytics.model.volatility.surface;

import it.unimi.dsi.fastutil.doubles.DoubleArrayList;

import java.util.Arrays;
import java.util.Collections;
import java.util.Set;

import javax.time.calendar.Period;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.core.marketdatasnapshot.VolatilitySurfaceData;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.ComputationTargetType;
import com.opengamma.engine.function.AbstractFunction;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.FunctionExecutionContext;
import com.opengamma.engine.function.FunctionInputs;
import com.opengamma.engine.value.ComputedValue;
import com.opengamma.engine.value.ValueProperties;
import com.opengamma.engine.value.ValuePropertyNames;
import com.opengamma.engine.value.ValueRequirement;
import com.opengamma.engine.value.ValueRequirementNames;
import com.opengamma.engine.value.ValueSpecification;
import com.opengamma.financial.analytics.model.InstrumentTypeProperties;
import com.opengamma.financial.analytics.volatility.surface.DefaultVolatilitySurfaceShiftFunction;
import com.opengamma.financial.analytics.volatility.surface.SurfaceQuoteType;
import com.opengamma.financial.analytics.volatility.surface.VolatilitySurfaceShiftFunction;
import com.opengamma.financial.model.option.definition.SmileDeltaParameter;
import com.opengamma.financial.model.option.definition.SmileDeltaTermStructureParameter;
import com.opengamma.util.money.UnorderedCurrencyPair;
import com.opengamma.util.time.Tenor;

/**
 * Engine function that converts raw FX call-delta volatility surface data (tenor x delta)
 * into a {@link SmileDeltaTermStructureParameter} standard volatility surface, optionally
 * applying a relative shift to all quotes.
 */
public class ForexCallDeltaVolatilitySurfaceFunction extends AbstractFunction.NonCompiledInvoker {
  private static final Logger s_logger = LoggerFactory.getLogger(ForexCallDeltaVolatilitySurfaceFunction.class);

  /**
   * Builds the standard volatility surface from the raw surface data input.
   * Missing (null) quotes are skipped; a single SHIFT constraint, if present, is applied
   * multiplicatively (1 + shift) to every volatility.
   */
  @Override
  public Set<ComputedValue> execute(final FunctionExecutionContext executionContext, final FunctionInputs inputs, final ComputationTarget target,
      final Set<ValueRequirement> desiredValues) {
    final String surfaceName = desiredValues.iterator().next().getConstraint(ValuePropertyNames.SURFACE);
    final ValueRequirement surfaceRequirement = getDataRequirement(surfaceName, target);
    final Object volatilitySurfaceObject = inputs.getValue(surfaceRequirement);
    if (volatilitySurfaceObject == null) {
      throw new OpenGammaRuntimeException("Could not get " + surfaceRequirement);
    }
    @SuppressWarnings("unchecked")
    final VolatilitySurfaceData<Tenor, Double> fxVolatilitySurface = (VolatilitySurfaceData<Tenor, Double>) volatilitySurfaceObject;
    final Tenor[] tenors = fxVolatilitySurface.getXs();
    final Double[] deltaValues = fxVolatilitySurface.getYs();
    // Sort axes so smiles are built in increasing tenor / delta order.
    Arrays.sort(tenors);
    Arrays.sort(deltaValues);
    final int nPoints = tenors.length;
    final SmileDeltaParameter[] smile = new SmileDeltaParameter[nPoints];
    final int nSmileValues = deltaValues.length;
    // Optional relative shift of all quotes, taken from the desired value's constraints.
    final Set<String> shifts = desiredValues.iterator().next().getConstraints().getValues(VolatilitySurfaceShiftFunction.SHIFT);
    final double shiftMultiplier;
    if ((shifts != null) && (shifts.size() == 1)) {
      final String shift = shifts.iterator().next();
      shiftMultiplier = 1 + Double.parseDouble(shift);
    } else {
      shiftMultiplier = 1;
    }
    for (int i = 0; i < tenors.length; i++) {
      final Tenor tenor = tenors[i];
      final double t = getTime(tenor);
      final DoubleArrayList deltas = new DoubleArrayList();
      final DoubleArrayList volatilities = new DoubleArrayList();
      for (int j = 0; j < nSmileValues; j++) {
        final Double delta = deltaValues[j];
        if (delta != null) {
          Double volatility = fxVolatilitySurface.getVolatility(tenor, delta);
          if (volatility != null) {
            volatility *= shiftMultiplier;
            // NOTE(review): only deltas below 50 are added to the delta list (scaled to a
            // fraction), while every volatility is kept - presumably SmileDeltaParameter
            // expects the ATM (50-delta) vol without a matching delta entry; confirm
            // against the SmileDeltaParameter constructor contract.
            if (delta < 50) {
              deltas.add(delta / 100);
            }
            volatilities.add(volatility);
          }
        } else {
          s_logger.info("Had a null value for tenor number " + j);
        }
      }
      smile[i] = new SmileDeltaParameter(t, deltas.toDoubleArray(), volatilities.toDoubleArray());
    }
    final SmileDeltaTermStructureParameter smiles = new SmileDeltaTermStructureParameter(smile);
    final ValueProperties.Builder resultProperties = createValueProperties()
        .with(ValuePropertyNames.SURFACE, surfaceName)
        .with(InstrumentTypeProperties.PROPERTY_SURFACE_INSTRUMENT_TYPE, InstrumentTypeProperties.FOREX);
    if (shifts != null) {
      resultProperties.with(VolatilitySurfaceShiftFunction.SHIFT, shifts);
    }
    return Collections.<ComputedValue>singleton(new ComputedValue(new ValueSpecification(ValueRequirementNames.STANDARD_VOLATILITY_SURFACE_DATA,
        target.toSpecification(), resultProperties.get()), smiles));
  }

  @Override
  public ComputationTargetType getTargetType() {
    return ComputationTargetType.PRIMITIVE;
  }

  /** Applies only to primitive targets identified by an unordered currency pair. */
  @Override
  public boolean canApplyTo(final FunctionCompilationContext context, final ComputationTarget target) {
    if (target.getType() != ComputationTargetType.PRIMITIVE) {
      return false;
    }
    if (UnorderedCurrencyPair.OBJECT_SCHEME.equals(target.getUniqueId().getScheme())) {
      return true;
    }
    return false;
  }

  /** Requires the raw volatility surface data for exactly one named surface. */
  @Override
  public Set<ValueRequirement> getRequirements(final FunctionCompilationContext context, final ComputationTarget target, final ValueRequirement desiredValue) {
    final Set<String> surfaceNames = desiredValue.getConstraints().getValues(ValuePropertyNames.SURFACE);
    if (surfaceNames == null || surfaceNames.size() != 1) {
      throw new OpenGammaRuntimeException("Need one surface name; have " + surfaceNames);
    }
    final String surfaceName = surfaceNames.iterator().next();
    return Collections.<ValueRequirement>singleton(getDataRequirement(surfaceName, target));
  }

  /** Advertises the standard volatility surface result, including the SHIFT property when configured. */
  @Override
  public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target) {
    final ValueProperties.Builder resultProperties = createValueProperties()
        .withAny(ValuePropertyNames.SURFACE)
        .with(InstrumentTypeProperties.PROPERTY_SURFACE_INSTRUMENT_TYPE, InstrumentTypeProperties.FOREX);
    if (context.getViewCalculationConfiguration() != null) {
      final Set<String> shifts = context.getViewCalculationConfiguration().getDefaultProperties().getValues(DefaultVolatilitySurfaceShiftFunction.VOLATILITY_SURFACE_SHIFT);
      if ((shifts != null) && (shifts.size() == 1)) {
        resultProperties.with(VolatilitySurfaceShiftFunction.SHIFT, shifts.iterator().next());
      }
    }
    return Collections.<ValueSpecification>singleton(new ValueSpecification(ValueRequirementNames.STANDARD_VOLATILITY_SURFACE_DATA, target.toSpecification(),
        resultProperties.get()));
  }

  /**
   * Converts a tenor to a year fraction using the largest non-zero calendar field
   * (years, else months / 12, else days / 365).
   * @param tenor the tenor, assumed to have at least one non-zero field
   * @return the time in years
   */
  private double getTime(final Tenor tenor) {
    final Period period = tenor.getPeriod();
    if (period.getYears() != 0) {
      return period.getYears();
    }
    if (period.getMonths() != 0) {
      return ((double) period.getMonths()) / 12;
    }
    if (period.getDays() != 0) {
      return ((double) period.getDays()) / 365;
    }
    throw new OpenGammaRuntimeException("Should never happen");
  }

  /** Builds the requirement for the raw call-delta FX surface data with the given name. */
  private ValueRequirement getDataRequirement(final String surfaceName, final ComputationTarget target) {
    return new ValueRequirement(ValueRequirementNames.VOLATILITY_SURFACE_DATA, target.toSpecification(),
        ValueProperties.builder()
            .with(ValuePropertyNames.SURFACE, surfaceName)
            .with(InstrumentTypeProperties.PROPERTY_SURFACE_INSTRUMENT_TYPE, InstrumentTypeProperties.FOREX)
            .with(SurfaceQuoteType.PROPERTY_SURFACE_QUOTE_TYPE, SurfaceQuoteType.CALL_DELTA).get());
  }
}
package no.group09.utils;

import no.group09.arduinoair.R;
import no.group09.database.Save;
import no.group09.database.objects.App;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.ProgressDialog;
import android.content.Context;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import android.view.View;
import android.widget.RatingBar;
import android.widget.TextView;
import android.content.DialogInterface;

/**
 * Activity showing the details of a single app (name, developer, rating) loaded
 * from the local database, with buttons to review or install it on a device.
 */
public class AppView extends Activity {

	// Modal progress dialog shown while "installing" to a device.
	ProgressDialog progressBar;
	// Install progress in percent (0-100); written by the worker thread, read on the UI thread.
	private int progressStatus = 0;
	// Handler bound to the UI thread, used to post progress-bar updates.
	private Handler progressHandler = new Handler();
	// Database access object. NOTE(review): opened in onCreate but never closed - confirm
	// whether Save.close() should be called in onDestroy.
	private Save save;

	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);

		//Set the xml layout
		setContentView(R.layout.app_view);

		//Fetch the application ID from the intent
		int appID = getIntent().getExtras().getInt("app");

		//Get the database
		save = new Save(getBaseContext());

		//Open the database
		save.open();

		//Fetch the application from the database
		App app = save.getApp(appID);

		TextView appName = (TextView) findViewById(R.id.app_view_app_name);
		TextView appDeveloper = (TextView) findViewById(R.id.app_view_developer);
		RatingBar rating = (RatingBar) findViewById(R.id.ratingBarIndicator);

		appName.setText(app.getName());
		appDeveloper.setText(String.valueOf(app.getDeveloperID())); //TODO: Get the developer from the database on this ID
		rating.setRating((float)app.getRating());

		//TODO: get all the information from the database.
	}

	// method for handling click of the review button (not yet implemented)
	public void reviewClicked(View view){
//		AlertDialog.Builder builder = new AlertDialog.Builder(this);
//		builder.setTitle("Review the app").setItem()
	}

	/**
	 * Handles the click of the install button: shows a confirmation dialog.
	 * Currently always shows the "no device connected" variant; the connected-device
	 * path is still commented out below.
	 */
	public void installClicked(View view){
		// creates an alertdialog builder
		AlertDialog.Builder builder = new AlertDialog.Builder(this);

		// if no device connected, create popup with that message
//		if(!getDeviceName().equals(null)
		builder.setMessage("Cannot install app, no device connected").setPositiveButton("Ok",new DialogInterface.OnClickListener() {
			@Override
			public void onClick(DialogInterface dialog, int which) {
			}
		}).setNegativeButton("Install anyway",new DialogInterface.OnClickListener(){
			@Override
			public void onClick(DialogInterface dialog, int which){
				// just to test if installingDialog works and if it responds well
				installingDialog();
			}
		});
		// NOTE(review): create() builds a dialog that is discarded; show() below builds and
		// shows a second one - the create() call looks redundant.
		builder.create();
//		else
//		builder.setMessage("Press install to install this app to " + getDeviceName().setPositiveButton("Install", new DialogInterface.OnClickListener(){
//		@Override
//		public void onClick(DialogInterface dialog, int which){
//		}).setNegativeButton("Cancel", new DialogInterface.OnClickListener(){
//		@Override
//		public void onClick(DialogInterface dialog, int which){
//		builder.create();
		builder.show();
	}

	/**
	 * Shows a horizontal progress dialog and starts a background thread that
	 * advances the progress once per second (placeholder for real install
	 * feedback from the device), then dismisses the dialog at 100%.
	 */
	public void installingDialog(){
		// preparing progress bar dialog
		progressBar = new ProgressDialog(this);
		progressBar.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL);
		progressBar.setCancelable(false);
		progressBar.setTitle("Installing app to " + getDeviceName());
		progressBar.setMessage("DO NOT DISCONNECT OR MOVE AWAY FROM THE DEVICE!");
		progressBar.setProgress(0);
		progressBar.setMax(100);
		progressBar.show();

		// reset progress bar status, just in case...
		progressStatus = 0;

		new Thread(new Runnable() {
			@Override
			public void run() {
				while(progressStatus < 100){
					// TODO: call method to update progressStatus
					progressStatus++;
					// to make sure thread doesnt use up too much resources
					try {
						Thread.sleep(1000);
					} catch (Exception e) {
						// TODO: handle exception
					}
					// update the progress bar itself (must happen on the UI thread)
					progressHandler.post(new Runnable() {
						@Override
						public void run() {
							progressBar.setProgress(progressStatus);
						}
					});
				}
				// the progress is done
				if(progressStatus >= 100){
					// sleep so the user can see the 100% mark
					try {
						Thread.sleep(1500);
					} catch (Exception e) {
						// TODO: handle exception
					}
					progressBar.dismiss();
				}
			}
		}).start();

		// TODO: create progressbar, or cancel after feedback from device
	}

	// Returns the name of the connected device; currently a stub that always returns null,
	// so the dialog title above reads "Installing app to null".
	private String getDeviceName(){
		// TODO: implement this method of getting the connected device name
		return null;
	}
}
package org.slc.sli.ingestion.referenceresolution;

import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.xml.ws.Holder;

import junit.framework.Assert;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.junit.Test;
import org.xml.sax.SAXException;

import org.slc.sli.ingestion.IngestionTest;

/**
 * Tests for {@link FreeMarkerExtendedReferenceResolver}: resolution of id-references
 * via FreeMarker templates, thread-safety of concurrent resolution, and behaviour
 * with a bad configuration.
 *
 * @author tke
 */
public class FreeMarkerExtendedReferenceResolverTest {

    // Resolver under test, shared by all test methods.
    FreeMarkerExtendedReferenceResolver referenceFactory = new FreeMarkerExtendedReferenceResolver();

    /**
     * Resolves {@code xpath} against {@code content} and asserts the result matches
     * {@code expected}, ignoring all whitespace and line breaks. The temporary result
     * file is always deleted.
     *
     * @param content  input XML file
     * @param expected file holding the expected resolved XML
     * @param xpath    the reference path to resolve
     * @throws IOException on read failure
     */
    private void test(File content, File expected, String xpath) throws IOException {
        File result = null;
        try {
            result = referenceFactory.resolve(xpath, content);
            Assert.assertNotNull(result);
            String expectedXML = readFromFile(expected);
            String actualXML = readFromFile(result);
            // Normalise both documents: strip line breaks and all whitespace before comparing.
            expectedXML = expectedXML.replaceAll("\\n|\\r", "");
            expectedXML = expectedXML.replaceAll("\\s+", "");
            actualXML = actualXML.replaceAll("\\n|\\r", "");
            actualXML = actualXML.replaceAll("\\s+", "");
            Assert.assertEquals(expectedXML, actualXML);
        } finally {
            if (result != null) {
                result.delete();
            }
        }
    }

    /**
     * Reads a whole file into a single newline-joined string.
     *
     * @param file file to read
     * @return file contents with lines joined by '\n'
     * @throws IOException on read failure
     */
    private String readFromFile(File file) throws IOException {
        FileReader reader = null;
        try {
            reader = new FileReader(file);
            List<String> lines = IOUtils.readLines(reader);
            return StringUtils.join(lines, '\n');
        } finally {
            IOUtils.closeQuietly(reader);
        }
    }

    /**
     * Resolves a StudentReference once on the main thread, then again concurrently
     * from two threads to check the resolver is thread-safe (no exceptions thrown).
     */
    @Test
    public void testResolution() throws IOException, SAXException {
        final File input = IngestionTest.getFile("idRefResolutionData/InterchangeStudentParent/StudentReference_input.xml");
        final File expected = IngestionTest.getFile("idRefResolutionData/InterchangeStudentParent/StudentReference_output.xml");

        Map<String, String> config = new HashMap<String, String>();
        config.put("/InterchangeStudentParent/StudentParentAssociation/StudentReference",
                "idRefResolution/InterchangeStudentParent/StudentParentAssociation/StudentReference.ftl");
        referenceFactory.setIdRefConfigs(config);

        test(input, expected, "/InterchangeStudentParent/StudentParentAssociation/StudentReference");

        // Holder flags any failure on the worker threads back to the test thread.
        final Holder<Boolean> exceptionThrown = new Holder<Boolean>(Boolean.FALSE);
        Runnable run = new Runnable() {
            @Override
            public void run() {
                try {
                    test(input, expected, "/InterchangeStudentParent/StudentParentAssociation/StudentReference");
                } catch (Throwable t) {
                    exceptionThrown.value = Boolean.TRUE;
                    throw new RuntimeException(t);
                }
            }
        };
        Thread th1 = new Thread(run, "Thread1");
        Thread th2 = new Thread(run, "Thread2");
        th1.start();
        th2.start();
        try {
            th1.join();
        } catch (InterruptedException e) {
            e.printStackTrace();
            Assert.fail();
        }
        try {
            th2.join();
        } catch (InterruptedException e) {
            e.printStackTrace();
            Assert.fail();
        }
        Assert.assertFalse(exceptionThrown.value);
    }

    /**
     * Resolving a path that has no template configured must return null.
     * NOTE(review): the config map is mutated after setIdRefConfigs - this only
     * exercises the intended case if the resolver keeps a live reference to the map
     * (or the lookup key genuinely has no entry); confirm setIdRefConfigs semantics.
     */
    @Test
    public void testBadConfiguration() {
        Map<String, String> config = new HashMap<String, String>();
        referenceFactory.setIdRefConfigs(config);
        config.put("/InterchangeStudentAssessment/StudentAssessment/AssessmentReference",
                "idRefResolution/InterchangeAssessmentMetadata/Assessment/AssessmentFamilyReference.ftl");
        Assert.assertNull(referenceFactory.resolve("/InterchangeStudentAssessment/StudentAssessment/AssessmentReference2", null));
    }
}
package org.jboss.as.weld;

import java.util.Set;

import org.jboss.as.weld.discovery.AnnotationType;
import org.jboss.jandex.DotName;

import com.google.common.base.Function;
import com.google.common.collect.Collections2;
import com.google.common.collect.ImmutableSet;

/**
 * Class that stores the {@link DotName}s of CDI annotations.
 *
 * Each constant pairs a package prefix with a simple class name and exposes the
 * componentized Jandex {@link DotName} for fast annotation-index lookups.
 */
public enum CdiAnnotations {
    /**
     * javax.decorator.Decorator CDI annotation.
     */
    DECORATOR(Constants.JAVAX_DECORATOR, "Decorator"),
    /**
     * javax.decorator.Delegate CDI annotation.
     */
    DELEGATE(Constants.JAVAX_DECORATOR, "Delegate"),
    /**
     * javax.enterprise.context.ApplicationScoped CDI annotation.
     */
    APP_SCOPED(Constants.JAVAX_ENT_CONTEXT, "ApplicationScoped"),
    /**
     * javax.enterprise.context.ConversationScoped CDI annotation.
     */
    CONV_SCOPED(Constants.JAVAX_ENT_CONTEXT, "ConversationScoped"),
    /**
     * javax.enterprise.context.RequestScoped CDI annotation.
     */
    REQ_SCOPED(Constants.JAVAX_ENT_CONTEXT, "RequestScoped"),
    /**
     * javax.enterprise.context.SessionScoped CDI annotation.
     */
    SESS_SCOPED(Constants.JAVAX_ENT_CONTEXT, "SessionScoped"),
    /**
     * javax.enterprise.context.NormalScope CDI annotation.
     */
    NORM_SCOPE(Constants.JAVAX_ENT_CONTEXT, "NormalScope"),
    /**
     * javax.enterprise.context.Dependent CDI annotation.
     */
    DEPENDENT(Constants.JAVAX_ENT_CONTEXT, "Dependent"),
    /**
     * javax.inject.Singleton annotation.
     */
    SINGLETON(Constants.JAVAX_INJ, "Singleton"),
    /**
     * javax.enterprise.event.Observes CDI annotation.
     */
    OBSERVES(Constants.JAVAX_ENT_EVT, "Observes"),
    /**
     * javax.enterprise.inject.Alternative CDI annotation.
     */
    ALTERNATIVE(Constants.JAVAX_ENT_INJ, "Alternative"),
    /**
     * javax.enterprise.inject.Any CDI annotation.
     */
    ANY(Constants.JAVAX_ENT_INJ, "Any"),
    /**
     * javax.enterprise.inject.Default CDI annotation.
     */
    DEFAULT(Constants.JAVAX_ENT_INJ, "Default"),
    /**
     * javax.enterprise.inject.Disposes CDI annotation.
     */
    DISPOSES(Constants.JAVAX_ENT_INJ, "Disposes"),
    /**
     * javax.enterprise.inject.Model CDI annotation.
     */
    MODEL(Constants.JAVAX_ENT_INJ, "Model"),
    /**
     * javax.enterprise.inject.New CDI annotation.
     */
    NEW(Constants.JAVAX_ENT_INJ, "New"),
    /**
     * javax.enterprise.inject.Produces CDI annotation.
     */
    PRODUCES(Constants.JAVAX_ENT_INJ, "Produces"),
    /**
     * javax.enterprise.inject.Specializes CDI annotation.
     */
    SPECIALIZES(Constants.JAVAX_ENT_INJ, "Specializes"),
    /**
     * javax.enterprise.inject.Stereotype CDI annotation.
     */
    STEREOTYPE(Constants.JAVAX_ENT_INJ, "Stereotype"),
    /**
     * javax.enterprise.inject.Typed CDI annotation.
     */
    TYPED(Constants.JAVAX_ENT_INJ, "Typed");

    /**
     * CDI annotation name.
     */
    private final String simpleName;

    /**
     * CDI annotation fully qualified name.
     */
    private final DotName dotName;

    /**
     * Constructor.
     *
     * @param prefix qualified name part
     * @param simpleName simple class name
     */
    private CdiAnnotations(final DotName prefix, final String simpleName) {
        this.simpleName = simpleName;
        this.dotName = DotName.createComponentized(prefix, simpleName);
    }

    /**
     * Package-prefix DotNames shared by the enum constants; this can't go on the enum
     * itself (constants are referenced from constant constructors, which run before
     * the enum's own static initializers).
     */
    private static class Constants {
        /**
         * javax.decorator package.
         */
        public static final DotName JAVAX_DECORATOR = DotName.createSimple("javax.decorator");
        /**
         * javax.enterprise.context package.
         */
        public static final DotName JAVAX_ENT_CONTEXT = DotName.createSimple("javax.enterprise.context");
        /**
         * javax.enterprise.event package.
         */
        public static final DotName JAVAX_ENT_EVT = DotName.createSimple("javax.enterprise.event");
        /**
         * javax.enterprise.inject package.
         */
        public static final DotName JAVAX_ENT_INJ = DotName.createSimple("javax.enterprise.inject");
        /**
         * javax.inject package.
         */
        public static final DotName JAVAX_INJ = DotName.createSimple("javax.inject");
    }

    /**
     * @return fully qualified name
     */
    public DotName getDotName() {
        return dotName;
    }

    /**
     * @return simple name
     */
    public String getSimpleName() {
        return simpleName;
    }

    /** DotName of the javax.inject.Scope meta-annotation. */
    public static final DotName SCOPE = DotName.createComponentized(Constants.JAVAX_INJ, "Scope");

    /** DotNames of the scope annotations built into CDI (dependent, request, conversation, session, application, singleton). */
    public static final Set<DotName> BUILT_IN_SCOPE_NAMES = ImmutableSet.<DotName>of(DEPENDENT.getDotName(), REQ_SCOPED.getDotName(),
            CONV_SCOPED.getDotName(), SESS_SCOPED.getDotName(), APP_SCOPED.getDotName(), SINGLETON.getDotName());

    /** The built-in scopes as {@link AnnotationType}s, derived from {@link #BUILT_IN_SCOPE_NAMES}. */
    public static final Set<AnnotationType> BUILT_IN_SCOPES = ImmutableSet.copyOf(Collections2.transform(BUILT_IN_SCOPE_NAMES,
            new Function<DotName, AnnotationType>() {
                public AnnotationType apply(DotName input) {
                    return new AnnotationType(input, true);
                }
            }));
}
package biz.playr;

import java.util.UUID;
import biz.playr.R;
import android.content.ComponentName;
import android.content.ServiceConnection;
import android.net.Uri;
import android.os.Bundle;
import android.os.IBinder;
import android.util.Log;
import android.view.Window;
import android.view.WindowManager;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.ActionBar;
import android.app.AlarmManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.view.View;
import android.os.Handler;
import android.webkit.ConsoleMessage;
import android.webkit.WebChromeClient;
import android.webkit.WebSettings.PluginState;
import android.webkit.WebView;
import android.webkit.WebSettings;
import android.webkit.WebViewClient;

/**
 * Single-activity kiosk-style shell that hosts a full-screen {@link WebView}.
 *
 * <p>Responsibilities visible in this class:
 * <ul>
 *   <li>Generates (once) and persists a player id in {@link SharedPreferences},
 *       then loads {@code playr_loader.html} from the app assets with the id and
 *       version info passed as query parameters.</li>
 *   <li>Binds to {@link CheckRestartService} while started and registers itself
 *       as the service's {@link IServiceCallbacks}.</li>
 *   <li>Keeps the screen on, hides system bars, and aggressively restarts
 *       itself (via {@link AlarmManager}) on crash or destruction.</li>
 * </ul>
 */
public class MainActivity extends Activity implements IServiceCallbacks {
	// The WebView that is the entire UI; assigned in onCreate.
	private WebView webView = null;
	// Log tag for this class. NOTE(review): the anonymous inner classes below
	// declare their own 'className' constants that shadow this one on purpose,
	// so each log line identifies its source.
	private static final String className = "biz.playr.MainActivity";
	// Service instance obtained from the LocalBinder; valid only while 'bound'.
	private CheckRestartService checkRestartService;
	// Whether we currently hold a live binding to CheckRestartService.
	private boolean bound = false;

	// Callbacks for service binding, passed to bindService() in onStart and
	// released in onStop.
	private ServiceConnection serviceConnection = new ServiceConnection() {
		private static final String className = "ServiceConnection";

		@Override
		public void onServiceConnected(ComponentName componentName, IBinder service) {
			Log.i(className, " override ServiceConnection.onServiceConnected");
			// Cast the IBinder to the service's LocalBinder and fetch the
			// CheckRestartService instance (in-process binding).
			biz.playr.CheckRestartService.LocalBinder binder = (biz.playr.CheckRestartService.LocalBinder) service;
			checkRestartService = binder.getService();
			bound = true;
			// Register this activity so the service can call back into it.
			checkRestartService.setCallbacks(MainActivity.this); // bind IServiceCallbacks
			Log.i(className, " ServiceConnection.onServiceConnected: service bound");
		}

		@Override
		public void onServiceDisconnected(ComponentName componentName) {
			Log.i(className, " override ServiceConnection.onServiceDisconnected");
			bound = false;
		}
	};

	/**
	 * Builds the whole UI: installs the crash handler, configures immersive
	 * full-screen behaviour, resolves the persistent player id, configures the
	 * WebView and (on a cold start only) loads the bootstrap HTML page.
	 */
	@Override
	protected void onCreate(Bundle savedInstanceState) {
		Log.i(className, "override onCreate");
		super.onCreate(savedInstanceState);
		getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
		requestWindowFeature(Window.FEATURE_NO_TITLE);

		// Setup restarting of the app when it crashes: any uncaught exception
		// is routed to DefaultExceptionHandler, which reschedules the app.
		Log.i(className, "onCreate: setup restarting of app on crash");
		Thread.setDefaultUncaughtExceptionHandler(new DefaultExceptionHandler(this));
		// (A commented-out test hook that threw a delayed exception to exercise
		// the handler used to live here.)

		// Setup visibility of system bars: whenever the bars become visible
		// (user touched the screen), hide them again after 2 seconds.
		View decorView = getWindow().getDecorView();
		decorView
				.setOnSystemUiVisibilityChangeListener(new View.OnSystemUiVisibilityChangeListener() {
					@Override
					public void onSystemUiVisibilityChange(int visibility) {
						// Note that system bars will only be "visible" if none of the
						// LOW_PROFILE, HIDE_NAVIGATION, or FULLSCREEN flags are set.
						if ((visibility & View.SYSTEM_UI_FLAG_FULLSCREEN) == 0) {
							// Bars are visible => user touched the screen; make
							// the bars disappear again in 2 seconds.
							Handler handler = new Handler();
							handler.postDelayed(new Runnable() {
								public void run() {
									hideBars();
								}
							}, 2000);
						} else {
							// The system bars are NOT visible => do nothing
						}
					}
				});
		decorView.setKeepScreenOn(true);
		setContentView(R.layout.activity_main);

		// Resolve the persistent player id, creating one on first launch.
		String playerId = getStoredPlayerId();
		if (playerId == null || playerId.length() == 0) {
			playerId = UUID.randomUUID().toString();
			storePlayerId(playerId);
			Log.i(className, "generated and stored playerId: " + playerId);
		} else {
			Log.i(className, "retrieved stored playerId: " + playerId);
		}

		// Setup webView
		webView = (WebView) findViewById(R.id.mainUiView);
		Log.i(className, "webView is " + (webView == null ? "null" : "not null"));
		setupWebView(webView);
		webView.setWebChromeClient(new WebChromeClient() {
			private String className = "WebChromeClient";

			@Override
			public void onShowCustomView(View view, CustomViewCallback callback) {
				Log.i(className, "override setWebChromeClient");
				super.onShowCustomView(view, callback);
			}

			@Override
			public boolean onConsoleMessage(ConsoleMessage consoleMessage) {
				// JS console output is intentionally not logged; default handling.
				return super.onConsoleMessage(consoleMessage);
			}
		});
		webView.setWebViewClient(new WebViewClient() {
			private static final String className = "biz.playr.WebViewClient";

			public boolean shouldOverrideUrlLoading(WebView view, String url) {
				// Return false from the callback instead of calling view.loadUrl
				// instead. Calling loadUrl introduces a subtle bug where if you
				// have any iframe within the page with a custom scheme URL
				// (say <iframe src="tel:123"/>) it will navigate your app's
				// main frame to that URL most likely breaking the app as a side
				// effect.
				return false; // then it is not handled by default action
			}

			@Override
			public void onReceivedError(WebView view, int errorCode, String description, String failingUrl) {
				Log.i(className, "override onReceivedError");
				Log.e(className, "WebView(Client) error: " + description + " code: "
						+ String.valueOf(errorCode) + " URL: " + failingUrl);
				Log.e(className, "===>>> !!! WebViewClient.onReceivedError Reloading Webview !!! <<<===");
				// Instead of delegating to super, reload the page so the kiosk
				// recovers from transient load failures.
				view.reload();
			}

			/*
			 * Added in API level 23 (use these when we set
			 * android:targetSdkVersion to 23)
			 *
			 * @Override public void onReceivedError(WebView view,
			 * WebResourceRequest request, WebResourceError error) { ... }
			 *
			 * @Override public void onReceivedHttpError(WebView view,
			 * WebResourceRequest request, WebResourceResponse errorResponse) { ... }
			 */
		});
		webView.setKeepScreenOn(true);

		// Collect version strings to report to the server via query parameters.
		String webviewUserAgent = webView.getSettings().getUserAgentString();
		String webviewVersion = "Android System WebView not installed";
		String appVersion = "app version not found";
		PackageManager pm = getPackageManager();
		PackageInfo pi;
		try {
			pi = pm.getPackageInfo("com.google.android.webview", 0);
			if (pi != null) {
				webviewVersion = "Version name: " + pi.versionName + " Version code: " + pi.versionCode;
			}
		} catch (PackageManager.NameNotFoundException e) {
			Log.e(className, "Android System WebView is not found");
		}
		try {
			pi = pm.getPackageInfo(getPackageName(), 0);
			if (pi != null) {
				appVersion = pi.versionName;
			}
		} catch (PackageManager.NameNotFoundException e) {
			Log.e(className, getPackageName() + " is not found");
		}

		// Only load the bootstrap page on a cold start; on recreation the
		// WebView state is restored in onRestoreInstanceState instead.
		if (savedInstanceState == null) {
			String pageUrl = Uri
					.parse("playr_loader.html")
					.buildUpon()
					.appendQueryParameter("player_id", playerId)
					.appendQueryParameter("webview_user_agent", webviewUserAgent)
					.appendQueryParameter("webview_version", webviewVersion)
					.appendQueryParameter("app_version", appVersion).build()
					.toString();
			// Tiny wrapper page whose only job is to redirect to pageUrl with a
			// base URL of file:///android_asset/ so the relative path resolves.
			String initialHtmlPage = "<html><head><script type=\"text/javascript\" charset=\"utf-8\">window.location = \"" + pageUrl + "\"</script><head><body/></html>";
			webView.loadDataWithBaseURL("file:///android_asset/", initialHtmlPage,
					"text/html", "UTF-8", null);
		}
	}

	/**
	 * Entry point used by {@link DefaultExceptionHandler}: logs the crash and
	 * recreates this activity.
	 */
	public void handleUncaughtException(Thread paramThread, Throwable paramThrowable) {
		Log.e(className, "handleUncaughtException; paramThread: " + paramThread
				+ ", paramThrowable: " + paramThrowable);
		// restartActivity();
		recreate();
	}

	/**
	 * Schedules a relaunch of this activity after
	 * {@code DefaultExceptionHandler.restartDelay} ms via AlarmManager,
	 * without killing the current process.
	 */
	public void restartDelayed() {
		Log.i(className, "restartDelayed");
		// Keep the Intent flags in sync with restartActivity() below.
		Intent activityIntent = new Intent(this.getBaseContext(), biz.playr.MainActivity.class);
		activityIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP
				| Intent.FLAG_ACTIVITY_CLEAR_TASK
				| Intent.FLAG_ACTIVITY_NEW_TASK);
		activityIntent.setAction(Intent.ACTION_MAIN);
		activityIntent.addCategory(Intent.CATEGORY_LAUNCHER);
		PendingIntent localPendingIntent = PendingIntent.getActivity(this.getBaseContext(), 0,
				activityIntent, PendingIntent.FLAG_ONE_SHOT);
		// Alarm type 0 == AlarmManager.RTC_WAKEUP.
		((AlarmManager)getSystemService(Context.ALARM_SERVICE)).set(0,
				System.currentTimeMillis() + DefaultExceptionHandler.restartDelay, localPendingIntent);
		Log.i(className, "restartDelayed: end");
	}

	// implement the IServiceCallbacks interface

	/** Called by CheckRestartService; delegates to {@link #restartActivity()}. */
	public void restartActivityWithDelay() {
		this.restartActivity();
	}

	/** Called by CheckRestartService; returns the persisted player id. */
	public String getPlayerId() {
		return getStoredPlayerId();
	}
	// end of implementation IServiceCallbacks

	/**
	 * Hard restart: schedules a relaunch via AlarmManager, finishes this
	 * activity and kills the process so the relaunch starts from scratch.
	 */
	public void restartActivity() {
		Log.i(className, "restartActivity");
		// the context of the activityIntent might need to be the running PlayrService
		// keep the Intent in sync with the Manifest and DefaultExceptionHandler
		Intent activityIntent = new Intent(this.getBaseContext(), biz.playr.MainActivity.class);
		activityIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP
				| Intent.FLAG_ACTIVITY_CLEAR_TASK
				| Intent.FLAG_ACTIVITY_NEW_TASK);
		activityIntent.setAction(Intent.ACTION_MAIN);
		activityIntent.addCategory(Intent.CATEGORY_LAUNCHER);
		// startActivity(activityIntent);
		// delay start so this activity can be ended before the new one starts
		PendingIntent pendingIntent = PendingIntent.getActivity(
				this.getBaseContext(), 0, activityIntent,
				PendingIntent.FLAG_ONE_SHOT);
		// Following code will restart application after <delay> seconds
		AlarmManager mgr = (AlarmManager) biz.playr.MainApplication
				.getInstance().getBaseContext()
				.getSystemService(Context.ALARM_SERVICE);
		mgr.set(AlarmManager.RTC_WAKEUP, System.currentTimeMillis()
				+ DefaultExceptionHandler.restartDelay, pendingIntent);
		Log.i(className, "restartActivity: killing this process");
		setResult(RESULT_OK);
		finish();
		android.os.Process.killProcess(android.os.Process.myPid());
		// System.exit(2);
	}

	@SuppressLint("SetJavaScriptEnabled")
	/*
	 * Configure the Webview for usage as the application's window: JS enabled,
	 * autoplay without gesture (API > 16), file/universal access for the asset
	 * bootstrap page, zoom disabled, Flash plugin enabled pre-KitKat.
	 */
	private void setupWebView(WebView webView) {
		Log.i(className, "setupWebView");
		WebSettings webSettings = webView.getSettings();
		webSettings.setJavaScriptEnabled(true);
		if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.JELLY_BEAN) {
			// Allow media to start without a user gesture (kiosk autoplay).
			webSettings.setMediaPlaybackRequiresUserGesture(false);
		}
		webSettings.setJavaScriptCanOpenWindowsAutomatically(true);
		webSettings.setLoadWithOverviewMode(true);
		webSettings.setUseWideViewPort(true);
		webSettings.setAllowFileAccess(true);
		if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN) {
			// Needed so the file:// bootstrap page may call out to http(s).
			webSettings.setAllowUniversalAccessFromFileURLs(true);
		}
		webSettings.setBuiltInZoomControls(false);
		webSettings.setSupportZoom(false);
		if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.KITKAT) {
			webSettings.setPluginState(PluginState.ON);
		}
	}

	/** Persists the WebView state alongside the activity state. */
	@Override
	protected void onSaveInstanceState(Bundle outState) {
		Log.i(className, "override onSaveInstanceState");
		super.onSaveInstanceState(outState);
		webView.saveState(outState);
	}

	/** Restores the WebView state saved in {@link #onSaveInstanceState}. */
	@Override
	protected void onRestoreInstanceState(Bundle savedInstanceState) {
		Log.i(className, "override onRestoreInstanceState");
		super.onRestoreInstanceState(savedInstanceState);
		if (savedInstanceState != null && !savedInstanceState.isEmpty()) {
			webView.restoreState(savedInstanceState);
		}
	}

	@Override
	protected void onResume() {
		Log.i(className, "override onResume");
		super.onResume();
		hideBars();
		webView.onResume();
	}

	/** Binds to {@link CheckRestartService}; the binding is released in onStop. */
	@Override
	protected void onStart() {
		Log.i(className, "override onStart");
		super.onStart();
		// bind to CheckRestartService
		Intent intent = new Intent(this, CheckRestartService.class);
		bindService(intent, serviceConnection, Context.BIND_AUTO_CREATE);
		// bindService(intent, serviceConnection, Context.BIND_IMPORTANT);
		// bindService(intent, serviceConnection, Context.BIND_ABOVE_CLIENT);
		Log.i(className, "onStart: service bound (auto create)");
	}

	@Override
	protected void onRestart() {
		Log.i(className, "override onRestart");
		super.onRestart();
	}

	@Override
	protected void onPause() {
		Log.i(className, "override onPause");
		webView.onPause();
		super.onPause();
	}

	// NOTE(review): missing @Override annotation (it does override
	// Activity.onStop); add it when the code is next touched.
	protected void onStop() {
		Log.i(className, "override onStop");
		// Unbind from service
		if (bound) {
			checkRestartService.setCallbacks(null); // unregister
			unbindService(serviceConnection);
			bound = false;
		}
		// The application is pushed into the background
		// This method is also called when the device is turned (portrait/landscape
		// switch) and will result in repeated restart of the app
		// detecting rotation to prevent unnecessary calls to restartDelayed is
		// supposed to be complex and may require logic that spans onStop and onCreate
		// since these are called by the Android system when the screen is rotated
		// restartDelayed();
		super.onStop();
	}

	/**
	 * Schedules a delayed relaunch whenever this activity is destroyed, so the
	 * kiosk comes back by itself.
	 */
	@Override
	protected void onDestroy() {
		Log.i(className, "override onDestroy");
		// since onDestroy is called when the device changes aspect ratio
		// (which is possible on tablets) this method cannot be used to force
		// a restart of the application when this method is called.
		// Having this logic here causes a restart loop when the device changes
		// aspect the ratio.
		// (An earlier inline AlarmManager-based restart was removed in favour
		// of restartDelayed() below.)
		restartDelayed();
		super.onDestroy();
	}

	/** Hides the status/navigation bars and the action bar (immersive kiosk). */
	@SuppressLint("InlinedApi")
	protected void hideBars() {
		if (getWindow() != null) {
			View decorView = getWindow().getDecorView();
			// Hide both the navigation bar and the status bar.
			// SYSTEM_UI_FLAG_FULLSCREEN is only available on Android 4.1 and higher, but as
			// a general rule, you should design your app to hide the status bar whenever you
			// hide the navigation bar.
			int uiOptions = View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
					| View.SYSTEM_UI_FLAG_FULLSCREEN
					| View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
					| View.SYSTEM_UI_FLAG_LAYOUT_STABLE;
			decorView.setSystemUiVisibility(uiOptions);
		}
		// Remember that you should never show the action bar if the
		// status bar is hidden, so hide that too if necessary.
		ActionBar actionBar = getActionBar();
		if (actionBar != null) {
			actionBar.hide();
		}
	}

	@Override
	/* Navigate the WebView's history when the user presses the Back key. */
	public void onBackPressed() {
		if (webView != null) {
			if (webView.canGoBack()) {
				webView.goBack();
			} else {
				super.onBackPressed();
			}
		} else {
			super.onBackPressed();
		}
	}

	/*
	 * PRIVATE methods
	 */

	/** Reads the player id from activity-scoped SharedPreferences ("" if unset). */
	private String getStoredPlayerId() {
		SharedPreferences sharedPreferences = getPreferences(Context.MODE_PRIVATE);
		return sharedPreferences.getString(getString(R.string.player_id_store), "");
	}

	/**
	 * Writes the player id to activity-scoped SharedPreferences.
	 * NOTE(review): uses synchronous commit(); apply() would avoid blocking,
	 * but commit() guarantees the id is on disk before we log success.
	 */
	private void storePlayerId(String value) {
		SharedPreferences sharedPreferences = getPreferences(Context.MODE_PRIVATE);
		SharedPreferences.Editor editor = sharedPreferences.edit();
		editor.putString(getString(R.string.player_id_store), value);
		editor.commit();
	}
}
package org.opendaylight.yangtools.yang.parser.rfc7950.stmt.type;

import static com.google.common.base.Preconditions.checkArgument;

import com.google.common.collect.ImmutableMap;
import java.util.Collection;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.model.api.SchemaPath;
import org.opendaylight.yangtools.yang.model.api.TypeDefinition;
import org.opendaylight.yangtools.yang.model.api.YangStmtMapping;
import org.opendaylight.yangtools.yang.model.api.meta.EffectiveStatement;
import org.opendaylight.yangtools.yang.model.api.stmt.TypeEffectiveStatement;
import org.opendaylight.yangtools.yang.model.api.stmt.TypeStatement;
import org.opendaylight.yangtools.yang.model.api.stmt.TypedefEffectiveStatement;
import org.opendaylight.yangtools.yang.model.api.stmt.TypedefStatement;
import org.opendaylight.yangtools.yang.model.api.type.BinaryTypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.BitsTypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.BooleanTypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.DecimalTypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.EmptyTypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.EnumTypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.IdentityrefTypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.InstanceIdentifierTypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.Int16TypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.Int32TypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.Int64TypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.Int8TypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.LeafrefTypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.StringTypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.Uint16TypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.Uint32TypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.Uint64TypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.Uint8TypeDefinition;
import org.opendaylight.yangtools.yang.model.api.type.UnionTypeDefinition;
import org.opendaylight.yangtools.yang.model.util.type.RestrictedTypes;
import org.opendaylight.yangtools.yang.parser.spi.TypeNamespace;
import org.opendaylight.yangtools.yang.parser.spi.meta.AbstractStatementSupport;
import org.opendaylight.yangtools.yang.parser.spi.meta.InferenceException;
import org.opendaylight.yangtools.yang.parser.spi.meta.ModelActionBuilder;
import org.opendaylight.yangtools.yang.parser.spi.meta.ModelActionBuilder.InferenceAction;
import org.opendaylight.yangtools.yang.parser.spi.meta.ModelActionBuilder.InferenceContext;
import org.opendaylight.yangtools.yang.parser.spi.meta.ModelActionBuilder.Prerequisite;
import org.opendaylight.yangtools.yang.parser.spi.meta.ModelProcessingPhase;
import org.opendaylight.yangtools.yang.parser.spi.meta.StatementSupport;
import org.opendaylight.yangtools.yang.parser.spi.meta.StmtContext;
import org.opendaylight.yangtools.yang.parser.spi.meta.StmtContext.Mutable;
import org.opendaylight.yangtools.yang.parser.spi.meta.StmtContextUtils;
import org.opendaylight.yangtools.yang.parser.spi.meta.SubstatementValidator;
import org.opendaylight.yangtools.yang.parser.spi.source.SourceException;

/**
 * Base {@link StatementSupport} for the YANG {@code type} statement.
 *
 * <p>Handles three concerns:
 * <ul>
 *   <li>mapping built-in YANG type names directly to shared
 *       {@link BuiltinEffectiveStatement} singletons,</li>
 *   <li>resolving user {@code typedef} references through {@link TypeNamespace}
 *       (with an inference action that fails the EFFECTIVE_MODEL phase when the
 *       referenced typedef does not exist), and</li>
 *   <li>instantiating the correct restricted/effective statement implementation
 *       for the base type when the {@code type} statement carries
 *       substatements (range, length, pattern, ...).</li>
 * </ul>
 */
abstract class AbstractTypeStatementSupport extends
        AbstractStatementSupport<String, TypeStatement, EffectiveStatement<String, TypeStatement>> {
    // Allowed substatements of 'type' per RFC 6020/7950.
    private static final SubstatementValidator SUBSTATEMENT_VALIDATOR = SubstatementValidator.builder(
        YangStmtMapping.TYPE)
        .addOptional(YangStmtMapping.BASE)
        .addAny(YangStmtMapping.BIT)
        .addAny(YangStmtMapping.ENUM)
        .addOptional(YangStmtMapping.FRACTION_DIGITS)
        .addOptional(YangStmtMapping.LENGTH)
        .addOptional(YangStmtMapping.PATH)
        .addAny(YangStmtMapping.PATTERN)
        .addOptional(YangStmtMapping.RANGE)
        .addOptional(YangStmtMapping.REQUIRE_INSTANCE)
        .addAny(YangStmtMapping.TYPE)
        .build();

    // Canonical names of the YANG built-in types.
    static final String BINARY = "binary";
    static final String BITS = "bits";
    static final String BOOLEAN = "boolean";
    static final String DECIMAL64 = "decimal64";
    static final String EMPTY = "empty";
    static final String ENUMERATION = "enumeration";
    static final String IDENTITY_REF = "identityref";
    static final String INSTANCE_IDENTIFIER = "instance-identifier";
    static final String INT8 = "int8";
    static final String INT16 = "int16";
    static final String INT32 = "int32";
    static final String INT64 = "int64";
    static final String LEAF_REF = "leafref";
    static final String STRING = "string";
    static final String UINT8 = "uint8";
    static final String UINT16 = "uint16";
    static final String UINT32 = "uint32";
    static final String UINT64 = "uint64";
    static final String UNION = "union";

    // Identity map of built-in type names; used both as a membership test and
    // as the intern pool in internArgument().
    private static final ImmutableMap<String, String> BUILT_IN_TYPES =
            ImmutableMap.<String, String>builder()
            .put(BINARY, BINARY)
            .put(BITS, BITS)
            .put(BOOLEAN, BOOLEAN)
            .put(DECIMAL64, DECIMAL64)
            .put(EMPTY, EMPTY)
            .put(ENUMERATION, ENUMERATION)
            .put(IDENTITY_REF,IDENTITY_REF)
            .put(INSTANCE_IDENTIFIER, INSTANCE_IDENTIFIER)
            .put(INT8, INT8)
            .put(INT16, INT16)
            .put(INT32, INT32)
            .put(INT64, INT64)
            .put(LEAF_REF, LEAF_REF)
            .put(STRING, STRING)
            .put(UINT8, UINT8)
            .put(UINT16, UINT16)
            .put(UINT32, UINT32)
            .put(UINT64, UINT64)
            .put(UNION, UNION)
            .build();

    // Built-in types whose parsing needs dedicated argument-specific supports
    // (they accept structural substatements such as bit/enum/path).
    private static final ImmutableMap<String, StatementSupport<?, ?, ?>> ARGUMENT_SPECIFIC_SUPPORTS =
            ImmutableMap.<String, StatementSupport<?, ?, ?>>builder()
            .put(BITS, new BitsSpecificationSupport())
            .put(DECIMAL64, new Decimal64SpecificationSupport())
            .put(ENUMERATION, new EnumSpecificationSupport())
            .put(IDENTITY_REF, new IdentityRefSpecificationRFC6020Support())
            .put(INSTANCE_IDENTIFIER, new InstanceIdentifierSpecificationSupport())
            .put(LEAF_REF, new LeafrefSpecificationRFC6020Support())
            .put(UNION, new UnionSpecificationSupport())
            .build();

    AbstractTypeStatementSupport() {
        super(YangStmtMapping.TYPE);
    }

    /** The argument is the raw type name; no transformation is applied. */
    @Override
    public final String parseArgumentValue(final StmtContext<?, ?, ?> ctx, final String value) {
        return value;
    }

    /**
     * Creates the declared statement, substituting a shared singleton for
     * plain built-in types where possible.
     */
    @Override
    public final TypeStatement createDeclared(final StmtContext<String, TypeStatement, ?> ctx) {
        return BuiltinTypeStatement.maybeReplace(new TypeStatementImpl(ctx));
    }

    /**
     * Creates the effective statement: resolves the base type (built-in
     * singleton or typedef), returns it unchanged when there are no
     * substatements, otherwise wraps it in the type-specific restricted
     * implementation.
     *
     * @throws SourceException if the referenced typedef cannot be found
     * @throws IllegalStateException if the base type class is not recognized
     */
    @Override
    public final TypeEffectiveStatement<TypeStatement> createEffective(
            final StmtContext<String, TypeStatement, EffectiveStatement<String, TypeStatement>> ctx) {
        // First look up the proper base type
        final TypeEffectiveStatement<TypeStatement> typeStmt;
        switch (ctx.coerceStatementArgument()) {
            case BINARY:
                typeStmt = BuiltinEffectiveStatement.BINARY;
                break;
            case BOOLEAN:
                typeStmt = BuiltinEffectiveStatement.BOOLEAN;
                break;
            case EMPTY:
                typeStmt = BuiltinEffectiveStatement.EMPTY;
                break;
            case INSTANCE_IDENTIFIER:
                typeStmt = BuiltinEffectiveStatement.INSTANCE_IDENTIFIER;
                break;
            case INT8:
                typeStmt = BuiltinEffectiveStatement.INT8;
                break;
            case INT16:
                typeStmt = BuiltinEffectiveStatement.INT16;
                break;
            case INT32:
                typeStmt = BuiltinEffectiveStatement.INT32;
                break;
            case INT64:
                typeStmt = BuiltinEffectiveStatement.INT64;
                break;
            case STRING:
                typeStmt = BuiltinEffectiveStatement.STRING;
                break;
            case UINT8:
                typeStmt = BuiltinEffectiveStatement.UINT8;
                break;
            case UINT16:
                typeStmt = BuiltinEffectiveStatement.UINT16;
                break;
            case UINT32:
                typeStmt = BuiltinEffectiveStatement.UINT32;
                break;
            case UINT64:
                typeStmt = BuiltinEffectiveStatement.UINT64;
                break;
            default:
                // Not a simple built-in: must be a typedef reference resolved
                // through TypeNamespace (guaranteed present by the inference
                // action registered in onFullDefinitionDeclared()).
                final QName qname = StmtContextUtils.parseNodeIdentifier(ctx, ctx.getStatementArgument());
                final StmtContext<?, TypedefStatement, TypedefEffectiveStatement> typedef =
                        SourceException.throwIfNull(ctx.getFromNamespace(TypeNamespace.class, qname),
                            ctx.getStatementSourceReference(), "Type '%s' not found", qname);
                typeStmt = typedef.buildEffective().asTypeEffectiveStatement();
        }

        // No substatements: the base type can be shared as-is.
        if (ctx.declaredSubstatements().isEmpty() && ctx.effectiveSubstatements().isEmpty()) {
            return typeStmt;
        }

        // Now instantiate the proper effective statement for that type
        final TypeDefinition<?> baseType = typeStmt.getTypeDefinition();
        if (baseType instanceof BinaryTypeDefinition) {
            return new BinaryTypeEffectiveStatementImpl(ctx, (BinaryTypeDefinition) baseType);
        } else if (baseType instanceof BitsTypeDefinition) {
            return new BitsTypeEffectiveStatementImpl(ctx, (BitsTypeDefinition) baseType);
        } else if (baseType instanceof BooleanTypeDefinition) {
            return new BooleanTypeEffectiveStatementImpl(ctx, (BooleanTypeDefinition) baseType);
        } else if (baseType instanceof DecimalTypeDefinition) {
            return new DecimalTypeEffectiveStatementImpl(ctx, (DecimalTypeDefinition) baseType);
        } else if (baseType instanceof EmptyTypeDefinition) {
            return new EmptyTypeEffectiveStatementImpl(ctx, (EmptyTypeDefinition) baseType);
        } else if (baseType instanceof EnumTypeDefinition) {
            return new EnumTypeEffectiveStatementImpl(ctx, (EnumTypeDefinition) baseType);
        } else if (baseType instanceof IdentityrefTypeDefinition) {
            return new IdentityrefTypeEffectiveStatementImpl(ctx, (IdentityrefTypeDefinition) baseType);
        } else if (baseType instanceof InstanceIdentifierTypeDefinition) {
            return new InstanceIdentifierTypeEffectiveStatementImpl(ctx,
                (InstanceIdentifierTypeDefinition) baseType);
        } else if (baseType instanceof Int8TypeDefinition) {
            return new IntegralTypeEffectiveStatementImpl<>(ctx,
                RestrictedTypes.newInt8Builder((Int8TypeDefinition) baseType, typeEffectiveSchemaPath(ctx)));
        } else if (baseType instanceof Int16TypeDefinition) {
            return new IntegralTypeEffectiveStatementImpl<>(ctx,
                RestrictedTypes.newInt16Builder((Int16TypeDefinition) baseType, typeEffectiveSchemaPath(ctx)));
        } else if (baseType instanceof Int32TypeDefinition) {
            return new IntegralTypeEffectiveStatementImpl<>(ctx,
                RestrictedTypes.newInt32Builder((Int32TypeDefinition) baseType, typeEffectiveSchemaPath(ctx)));
        } else if (baseType instanceof Int64TypeDefinition) {
            return new IntegralTypeEffectiveStatementImpl<>(ctx,
                RestrictedTypes.newInt64Builder((Int64TypeDefinition) baseType, typeEffectiveSchemaPath(ctx)));
        } else if (baseType instanceof LeafrefTypeDefinition) {
            return new LeafrefTypeEffectiveStatementImpl(ctx, (LeafrefTypeDefinition) baseType);
        } else if (baseType instanceof StringTypeDefinition) {
            return new StringTypeEffectiveStatementImpl(ctx, (StringTypeDefinition) baseType);
        } else if (baseType instanceof Uint8TypeDefinition) {
            return new IntegralTypeEffectiveStatementImpl<>(ctx,
                RestrictedTypes.newUint8Builder((Uint8TypeDefinition) baseType, typeEffectiveSchemaPath(ctx)));
        } else if (baseType instanceof Uint16TypeDefinition) {
            return new IntegralTypeEffectiveStatementImpl<>(ctx,
                RestrictedTypes.newUint16Builder((Uint16TypeDefinition) baseType, typeEffectiveSchemaPath(ctx)));
        } else if (baseType instanceof Uint32TypeDefinition) {
            return new IntegralTypeEffectiveStatementImpl<>(ctx,
                RestrictedTypes.newUint32Builder((Uint32TypeDefinition) baseType, typeEffectiveSchemaPath(ctx)));
        } else if (baseType instanceof Uint64TypeDefinition) {
            return new IntegralTypeEffectiveStatementImpl<>(ctx,
                RestrictedTypes.newUint64Builder((Uint64TypeDefinition) baseType, typeEffectiveSchemaPath(ctx)));
        } else if (baseType instanceof UnionTypeDefinition) {
            return new UnionTypeEffectiveStatementImpl(ctx, (UnionTypeDefinition) baseType);
        } else {
            throw new IllegalStateException("Unhandled base type " + baseType);
        }
    }

    /**
     * Registers an EFFECTIVE_MODEL inference action that verifies a referenced
     * typedef exists; built-in type names need no such prerequisite.
     */
    @Override
    public final void onFullDefinitionDeclared(
            final Mutable<String, TypeStatement, EffectiveStatement<String, TypeStatement>> stmt) {
        super.onFullDefinitionDeclared(stmt);

        // if it is yang built-in type, no prerequisite is needed, so simply return
        if (BUILT_IN_TYPES.containsKey(stmt.getStatementArgument())) {
            return;
        }

        final QName typeQName = StmtContextUtils.parseNodeIdentifier(stmt, stmt.getStatementArgument());
        final ModelActionBuilder typeAction = stmt.newInferenceAction(ModelProcessingPhase.EFFECTIVE_MODEL);
        final Prerequisite<StmtContext<?, ?, ?>> typePrereq = typeAction.requiresCtx(stmt, TypeNamespace.class,
                typeQName, ModelProcessingPhase.EFFECTIVE_MODEL);
        typeAction.mutatesEffectiveCtx(stmt.getParentContext());

        /*
         * If the type does not exist, throw new InferenceException.
         * Otherwise perform no operation.
         */
        typeAction.apply(new InferenceAction() {
            @Override
            public void apply(final InferenceContext ctx) {
                // Intentional NOOP
            }

            @Override
            public void prerequisiteFailed(final Collection<? extends Prerequisite<?>> failed) {
                InferenceException.throwIf(failed.contains(typePrereq), stmt.getStatementSourceReference(),
                    "Type [%s] was not found.", typeQName);
            }
        });
    }

    @Override
    protected final SubstatementValidator getSubstatementValidator() {
        return SUBSTATEMENT_VALIDATOR;
    }

    /** Interns built-in type names so they share a single String instance. */
    @Override
    public final String internArgument(final String rawArgument) {
        final String found;
        return (found = BUILT_IN_TYPES.get(rawArgument)) != null ? found : rawArgument;
    }

    @Override
    public boolean hasArgumentSpecificSupports() {
        return !ARGUMENT_SPECIFIC_SUPPORTS.isEmpty();
    }

    /** Returns the dedicated support for types such as bits/enumeration, or null. */
    @Override
    public StatementSupport<?, ?, ?> getSupportSpecificForArgument(final String argument) {
        return ARGUMENT_SPECIFIC_SUPPORTS.get(argument);
    }

    /**
     * Derives the effective schema path of a restricted type: the statement's
     * own QName rebound to the parent's module, appended to the parent path.
     *
     * @throws IllegalArgumentException if the path has no parent component
     */
    static final SchemaPath typeEffectiveSchemaPath(final StmtContext<?, ?, ?> stmtCtx) {
        final SchemaPath path = stmtCtx.getSchemaPath().get();
        final SchemaPath parent = path.getParent();
        final QName parentQName = parent.getLastComponent();
        checkArgument(parentQName != null, "Path %s has an empty parent", path);

        final QName qname = path.getLastComponent().bindTo(parentQName.getModule()).intern();
        return parent.createChild(qname);
    }
}
package blog.bn;

import blog.ObjectIdentifier;
import blog.common.Util;
import blog.model.DependencyModel;
import blog.model.FuncAppTerm;
import blog.model.LogicalVar;
import blog.model.Model;
import blog.model.RandomFunction;
import blog.model.Term;
import blog.model.Type;
import blog.sample.EvalContext;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

/**
 * A random variable whose value is the value of a certain random function on a
 * certain tuple of arguments.
 */
public class RandFuncAppVar extends VarWithDistrib {

  /**
   * Creates a RandFuncAppVar for the given function applied to the given tuple
   * of arguments.
   */
  public RandFuncAppVar(RandomFunction f, List args) {
    super(args);
    this.f = f;
  }

  /**
   * Creates a RandFuncAppVar for the given function applied to the given tuple
   * of arguments.
   */
  public RandFuncAppVar(RandomFunction f, Object[] args) {
    super(args);
    this.f = f;
  }

  /**
   * Creates a RandFuncAppVar for the given function applied to the given tuple
   * of arguments. If <code>stable</code> is true, then the caller guarantees
   * that the given <code>args</code> array will not be modified externally.
   */
  public RandFuncAppVar(RandomFunction f, Object[] args, boolean stable) {
    super(args, stable);
    this.f = f;
  }

  /**
   * Returns the function being applied in this function application variable.
   */
  public final RandomFunction func() {
    return f;
  }

  /**
   * Returns the return type of this variable's function.
   */
  public Type getType() {
    return f.getRetType();
  }

  /** Ordering index taken from the function's dependency model. */
  public int getOrderingIndex() {
    return f.getDepModel().getCreationIndex();
  }

  /** The dependency model governing this variable's distribution. */
  public DependencyModel getDepModel() {
    return f.getDepModel();
  }

  /**
   * Returns the distribution of this variable under the given evaluation
   * context, pushing/popping this variable as the current evaluee around the
   * lookup. A fatal error is raised when the function has no dependency
   * statement.
   */
  public DependencyModel.Distrib getDistrib(EvalContext context) {
    context.pushEvaluee(this);

    DependencyModel model = f.getDepModel();
    if (model == null) {
      Util.fatalErrorWithoutStack("Can't get distribution for random variable because function "
          + f.getSig() + " has no dependency statement.");
    }
    DependencyModel.Distrib result = model.getDistribWithBinding(context,
        f.getArgVars(), args(), Model.NULL);

    context.popEvaluee();
    return result;
  }

  /**
   * Builds a canonical FuncAppTerm for this variable, mapping each argument
   * either to the logical variable supplied for it or to the canonical term of
   * its declared argument type.
   *
   * @throws IllegalArgumentException if an ObjectIdentifier argument has no
   *         logical variable in <code>logicalVarForObj</code>
   * @throws UnsupportedOperationException if an argument type cannot produce a
   *         canonical term
   */
  public FuncAppTerm getCanonicalTerm(Map logicalVarForObj) {
    List argTerms = new ArrayList();
    for (int pos = 0; pos < args.length; ++pos) {
      Object arg = args[pos];
      Term term = (LogicalVar) logicalVarForObj.get(arg);
      if (term == null) {
        // No logical variable bound to this argument; fall back to the
        // argument type's canonical term.
        if (arg instanceof ObjectIdentifier) {
          throw new IllegalArgumentException(
              "No logical variable specified for object identifier " + arg);
        }
        term = f.getArgTypes()[pos].getCanonicalTerm(arg);
        if (term == null) {
          throw new UnsupportedOperationException(
              "Can't get canonical term for object " + arg + " of type "
                  + f.getArgTypes()[pos]);
        }
      }
      argTerms.add(term);
    }
    return new FuncAppTerm(f, argTerms);
  }

  /** Shallow copy sharing the same function and argument array. */
  public Object clone() {
    return new RandFuncAppVar(f, args);
  }

  /**
   * Two RandFuncAppVar objects are equal if they have the same function and
   * their argument arrays are equal (recall that Arrays.equals calls the
   * <code>equals</code> method on each corresponding pair of objects in the two
   * arrays).
   */
  public boolean equals(Object obj) {
    if (!(obj instanceof RandFuncAppVar)) {
      return false;
    }
    RandFuncAppVar that = (RandFuncAppVar) obj;
    return f == that.func() && Arrays.equals(args, that.args());
  }

  @Override
  public int hashCode() {
    // Same 7/43 mixing scheme as before, folded into two steps.
    int result = 43 * 7 + f.hashCode();
    return 43 * result + super.hashCode();
  }

  @Override
  public String toString() {
    if (stringRep == null) {
      stringRep = buildString();
    }
    return stringRep;
  }

  // Renders "f" for zero arguments, otherwise "f(a0, a1, ...)".
  private String buildString() {
    if (args.length == 0) {
      return f.toString();
    }
    StringBuilder sb = new StringBuilder();
    sb.append(f).append("(");
    for (int i = 0; i < args.length; ++i) {
      if (i > 0) {
        sb.append(", ");
      }
      sb.append(args[i]);
    }
    sb.append(")");
    return sb.toString();
  }

  // Lazily computed cache for toString().
  private String stringRep = null;

  // The random function applied by this variable.
  private final RandomFunction f;
}
package org.wickedsource.coderadar.job.scan.commit;

import static org.mockito.Mockito.*;
import static org.wickedsource.coderadar.factories.entities.EntityFactory.project;

import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import org.apache.commons.io.FileUtils;
import org.junit.jupiter.api.*;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.Spy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.profiler.Profiler;
import org.wickedsource.coderadar.commit.domain.Commit;
import org.wickedsource.coderadar.commit.domain.CommitRepository;
import org.wickedsource.coderadar.core.WorkdirManager;
import org.wickedsource.coderadar.core.configuration.CoderadarConfiguration;
import org.wickedsource.coderadar.job.LocalGitRepositoryManager;
import org.wickedsource.coderadar.project.domain.Project;
import org.wickedsource.coderadar.project.domain.ProjectRepository;
import org.wickedsource.coderadar.project.domain.VcsCoordinates;
import org.wickedsource.coderadar.testframework.template.GitTestTemplate;
import org.wickedsource.coderadar.vcs.git.GitRepositoryChecker;
import org.wickedsource.coderadar.vcs.git.GitRepositoryCloner;
import org.wickedsource.coderadar.vcs.git.GitRepositoryResetter;
import org.wickedsource.coderadar.vcs.git.GitRepositoryUpdater;

/**
 * Integration test for {@code CommitMetadataScanner}: clones a real remote git
 * repository (network access required), scans its commit metadata, and verifies
 * that commits are saved via the {@link CommitRepository} mock. A second scan
 * checks that an already-cloned repository is reused.
 */
public class CommitMetadataScannerIntegrationTest extends GitTestTemplate {

  // NOTE(review): logger is created for ScanCommitsJob, not this test class —
  // confirm that is intentional.
  private Logger logger = LoggerFactory.getLogger(ScanCommitsJob.class);

  @Mock private ProjectRepository projectRepository;

  @Mock private CommitRepository commitRepository;

  // @Spy: real git checker/cloner are used (the test needs real clone behavior).
  @Spy private GitRepositoryChecker gitChecker;

  @Spy private GitRepositoryCloner gitCloner;

  private GitRepositoryUpdater gitUpdater;

  @Mock private WorkdirManager workdirManager;

  private LocalGitRepositoryManager updater;

  @Mock private CoderadarConfiguration config;

  @Mock private MetricRegistry metricRegistry;

  /** Initializes annotated mocks and wires the real updater/manager under test. */
  @BeforeEach
  public void setup() {
    MockitoAnnotations.initMocks(this);
    // NOTE(review): Mockito's mock(...) does not (re)configure an existing
    // @Mock field; passing an instance here looks like a no-op (or an error
    // on newer Mockito versions) — confirm intent.
    mock(workdirManager);
    mock(config);
    gitUpdater = new GitRepositoryUpdater(new GitRepositoryResetter());
    updater = new LocalGitRepositoryManager(gitUpdater, gitCloner, gitChecker, workdirManager);
  }

  /**
   * Deletes the cloned working directory unless the developer opts to keep it
   * via -Dcoderadar.keepTempFiles=true for inspection.
   */
  @AfterEach
  public void cleanup() {
    try {
      if (!Boolean.valueOf(System.getProperty("coderadar.keepTempFiles"))) {
        // NOTE(review): config is a mock; getWorkdir() presumably gets stubbed
        // by the test template or elsewhere — if unstubbed it returns null and
        // this throws NPE (not caught below). Verify.
        FileUtils.deleteDirectory(config.getWorkdir().toFile());
      }
    } catch (IOException e) {
      logger.warn("could not delete temp dir at {}", config.getWorkdir());
    }
  }

  /**
   * Scans a real repository twice: first without a local clone (full clone),
   * then with the clone already present (should be fast). Asserts a valid git
   * repository exists afterwards and that at least 20 commits were saved.
   */
  @Test
  @Tag("IntegrationTest")
  public void scan() {
    Project project = project().validProject();
    Profiler profiler = new Profiler("Scanner");
    profiler.setLogger(logger);
    when(metricRegistry.meter(anyString())).thenReturn(new Meter());
    CommitMetadataScanner scanner =
        new CommitMetadataScanner(commitRepository, updater, metricRegistry);
    when(projectRepository.findById(project.getId()))
        .thenReturn(java.util.Optional.ofNullable(createProject()));
    profiler.start("scanning without local repository present");
    File repoRoot = scanner.scan(project).getParentFile();
    Assertions.assertTrue(gitChecker.isRepository(repoRoot.toPath()));
    // scanning again should be fairly quick, since the repository is already cloned
    profiler.start("re-scanning with local repository present from last test");
    scanner.scan(project);
    Assertions.assertTrue(gitChecker.isRepository(repoRoot.toPath()));
    verify(commitRepository, atLeast(20)).save(any(Commit.class));
    profiler.stop().log();
  }

  /**
   * Builds a Project entity pointing at a small public GitHub repository used
   * as the scan target.
   */
  private Project createProject() {
    try {
      Project project = new Project();
      project.setId(1L);
      project.setName("coderadar");
      VcsCoordinates vcs = new VcsCoordinates();
      vcs.setUrl(new URL("https://github.com/thombergs/diffparser.git"));
      project.setVcsCoordinates(vcs);
      return project;
    } catch (MalformedURLException e) {
      throw new RuntimeException(e);
    }
  }
}
package ro.isdc.wro.config;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

import javax.servlet.FilterConfig;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ro.isdc.wro.WroRuntimeException;
import ro.isdc.wro.config.jmx.WroConfiguration;
import ro.isdc.wro.http.WroFilter;
import ro.isdc.wro.http.support.FieldsSavingRequestWrapper;
import ro.isdc.wro.model.resource.ResourceType;

/**
 * Holds the properties related to a request cycle. A {@link Context} is stored in a static map keyed by a
 * per-thread correlationId, so each request-processing thread sees its own context.
 *
 * @author Alex Objelean
 */
public class Context {
  private static final Logger LOG = LoggerFactory.getLogger(Context.class);
  /**
   * Maps correlationId with a Context.
   */
  private static final Map<String, Context> CONTEXT_MAP = Collections.synchronizedMap(new HashMap<String, Context>());
  /**
   * Holds a correlationId, created in {@link WroFilter}. A correlationId will be associated with a {@link Context}
   * object.
   */
  private static final ThreadLocal<String> CORRELATION_ID = new ThreadLocal<String>();
  private WroConfiguration config;
  /**
   * Request.
   */
  private transient HttpServletRequest request;
  /**
   * Response.
   */
  private transient HttpServletResponse response;
  /**
   * ServletContext.
   */
  private transient ServletContext servletContext;
  /**
   * FilterConfig.
   */
  private transient FilterConfig filterConfig;
  /**
   * The path to the folder, relative to the root, used to compute rewritten image url.
   */
  private String aggregatedFolderPath;

  /**
   * @return {@link WroConfiguration} singleton instance.
   */
  public WroConfiguration getConfig() {
    return config;
  }

  /**
   * DO NOT CALL THIS METHOD UNLESS YOU ARE SURE WHAT YOU ARE DOING.
   * <p/>
   * sets the {@link WroConfiguration} singleton instance.
   */
  public void setConfig(final WroConfiguration config) {
    this.config = config;
  }

  /**
   * A context useful for running in web context (inside a servlet container).
   */
  public static Context webContext(final HttpServletRequest request, final HttpServletResponse response,
      final FilterConfig filterConfig) {
    return new Context(request, response, filterConfig);
  }

  /**
   * A context useful for running in non web context (standalone applications).
   */
  public static Context standaloneContext() {
    return new Context();
  }

  /**
   * @return {@link Context} associated with CURRENT request cycle.
   * @throws WroRuntimeException if no context is associated with the current thread.
   */
  public static Context get() {
    validateContext();
    final String correlationId = CORRELATION_ID.get();
    LOG.debug("get Context for correlationId: {}", correlationId);
    return CONTEXT_MAP.get(correlationId);
  }

  /**
   * @return true if the call is done during wro4j request cycle. In other words, if the context is set.
   */
  public static boolean isContextSet() {
    return CORRELATION_ID.get() != null && CONTEXT_MAP.get(CORRELATION_ID.get()) != null;
  }

  /**
   * Checks if the {@link Context} is accessible from current request cycle.
   */
  private static void validateContext() {
    if (!isContextSet()) {
      throw new WroRuntimeException("No context associated with CURRENT request cycle!");
    }
  }

  /**
   * Set a context with default configuration to current thread.
   */
  public static void set(final Context context) {
    set(context, new WroConfiguration());
  }

  /**
   * Associate a context with the CURRENT request cycle. A fresh correlationId is generated and bound to the
   * current thread.
   *
   * @param context {@link Context} to set.
   * @param config the {@link WroConfiguration} to attach to the context; must not be null.
   */
  public static void set(final Context context, final WroConfiguration config) {
    Validate.notNull(context);
    Validate.notNull(config);
    context.setConfig(config);
    final String correlationId = generateCorrelationId();
    CORRELATION_ID.set(correlationId);
    CONTEXT_MAP.put(correlationId, context);
  }

  /**
   * @return a string representation of an unique id used to store Context in a map.
   */
  private static String generateCorrelationId() {
    return UUID.randomUUID().toString();
  }

  /**
   * Remove context from the local thread. Also removes the correlationId binding for this thread.
   */
  public static void unset() {
    final String correlationId = CORRELATION_ID.get();
    if (correlationId != null) {
      CONTEXT_MAP.remove(correlationId);
    }
    CORRELATION_ID.remove();
  }

  /**
   * Private constructor. Used to build {@link StandAloneContext}.
   */
  private Context() {
  }

  /**
   * Constructor.
   */
  private Context(final HttpServletRequest request, final HttpServletResponse response,
      final FilterConfig filterConfig) {
    // TODO check if decorating is necessary
    this.request = new FieldsSavingRequestWrapper(request);
    this.response = response;
    this.servletContext = filterConfig != null ? filterConfig.getServletContext() : null;
    this.filterConfig = filterConfig;
  }

  /**
   * @return the request
   */
  public HttpServletRequest getRequest() {
    return this.request;
  }

  /**
   * @return the response
   */
  public HttpServletResponse getResponse() {
    return this.response;
  }

  /**
   * @return the servletContext
   */
  public ServletContext getServletContext() {
    return this.servletContext;
  }

  /**
   * @return the filterConfig
   */
  public FilterConfig getFilterConfig() {
    return this.filterConfig;
  }

  /**
   * @return the aggregatedFolderPath
   */
  public String getAggregatedFolderPath() {
    return this.aggregatedFolderPath;
  }

  /**
   * This field is useful only for the aggregated resources of type {@link ResourceType#CSS}. </br>The
   * aggregatedFolderPath is used to compute the depth. For example, if aggregatedFolder is "wro" then the depth is 1
   * and the path used to prefix the image url is <code>".."</code>. If the aggregatedFolder is "css/aggregated", the
   * depth is 2 and the prefix is <code>"../.."</code>. The name of the aggregated folder is not important, it is used
   * only to compute the depth.
   *
   * @param aggregatedFolderPath the aggregatedFolderPath to set
   */
  public void setAggregatedFolderPath(final String aggregatedFolderPath) {
    this.aggregatedFolderPath = aggregatedFolderPath;
  }

  /**
   * Perform context clean-up.
   */
  public static void destroy() {
    unset();
    // remove all context objects stored in map
    CONTEXT_MAP.clear();
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public String toString() {
    return ToStringBuilder.reflectionToString(this, ToStringStyle.MULTI_LINE_STYLE);
  }

  /**
   * Set the correlationId to the current thread.
   */
  public static void setCorrelationId(final String correlationId) {
    Validate.notNull(correlationId);
    CORRELATION_ID.set(correlationId);
  }

  /**
   * Remove the correlationId from the current thread. This operation will not remove the {@link Context} associated
   * with the correlationId. In order to remove context, call {@link Context#unset()}.
   * <p/>
   * Unsetting correlationId is useful when you create child threads which needs to access the correlationId from the
   * parent thread. This simulates the {@link InheritableThreadLocal} functionality.
   *
   * @param correlationId must not be null (retained for backward compatibility; the value is not used).
   */
  public static void unsetCorrelationId(final String correlationId) {
    Validate.notNull(correlationId);
    // BUG FIX: previously this called CORRELATION_ID.set(correlationId), which re-associated the id
    // instead of removing it as the method name and javadoc promise.
    CORRELATION_ID.remove();
  }

  /**
   * @return the correlationId associated with this thread.
   * @throws WroRuntimeException if no context is associated with the current thread.
   */
  public static String getCorrelationId() {
    validateContext();
    return CORRELATION_ID.get();
  }
}
package org.deviceconnect.android.deviceplugin.sw.profile;

import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.Bitmap.CompressFormat;
import android.graphics.BitmapFactory;

import com.sonyericsson.extras.liveware.aef.control.Control;
import com.sonyericsson.extras.liveware.extension.util.registration.DeviceInfoHelper;

import org.deviceconnect.android.deviceplugin.sw.R;
import org.deviceconnect.android.deviceplugin.sw.SWConstants;
import org.deviceconnect.android.deviceplugin.sw.service.SWService;
import org.deviceconnect.android.message.MessageUtils;
import org.deviceconnect.android.profile.CanvasProfile;
import org.deviceconnect.android.profile.api.DConnectApi;
import org.deviceconnect.android.profile.api.PostApi;
import org.deviceconnect.android.profile.util.CanvasProfileUtils;
import org.deviceconnect.message.DConnectMessage;

import java.io.ByteArrayOutputStream;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * {@link CanvasProfile} implementation for the Sony SmartWatch: draws an image
 * posted via the Device Connect "draw image" API onto the watch display.
 *
 * @author NTT DOCOMO, INC.
 */
public class SWCanvasProfile extends CanvasProfile {

    // Single-threaded executor used to fetch image data from a URI off the request thread.
    private final ExecutorService mImageService = Executors.newSingleThreadExecutor();

    /**
     * POST /canvas/drawImage handler. Accepts either inline image bytes or a URI;
     * URI fetches are performed asynchronously (onRequest returns false and the
     * response is sent later via sendResponse).
     */
    private final DConnectApi mPostDrawImageApi = new PostApi() {

        @Override
        public String getAttribute() {
            return ATTRIBUTE_DRAW_IMAGE;
        }

        @Override
        public boolean onRequest(final Intent request, final Intent response) {
            final byte[] data = getData(request);
            final double x = getX(request);
            final double y = getY(request);
            final String mode = getMode(request);
            String mimeType = getMIMEType(request);
            // Reject non-image payloads early.
            if (mimeType != null && !mimeType.contains("image")) {
                MessageUtils.setInvalidRequestParameterError(response,
                    "Unsupported mimeType: " + mimeType);
                return true;
            }
            if (data == null) {
                // No inline data: download from the request URI asynchronously.
                mImageService.execute(new Runnable() {
                    @Override
                    public void run() {
                        String uri = getURI(request);
                        byte[] result = getData(uri);
                        if (result == null) {
                            MessageUtils.setInvalidRequestParameterError(response,
                                "could not get image from uri.");
                            sendResponse(response);
                            return;
                        }
                        drawImage(response, result, x, y, mode);
                        sendResponse(response);
                    }
                });
                // false = response will be sent asynchronously.
                return false;
            } else {
                drawImage(response, data, x, y, mode);
                return true;
            }
        }
    };

    public SWCanvasProfile() {
        addApi(mPostDrawImageApi);
    }

    /**
     * Draws the given image bytes on the watch display and fills in the response
     * result. Sets an invalid-parameter error if the mode value is unknown.
     */
    private void drawImage(final Intent response, byte[] data, double x, double y, String mode) {
        DisplaySize size = determineDisplaySize(getContext(),
                ((SWService) getService()).getHostPackageName());
        boolean result = showDisplay(data, x, y, mode, size);
        if (!result) {
            /* unknown mode-value. */
            MessageUtils.setInvalidRequestParameterError(response);
            return;
        }
        setResult(response, DConnectMessage.RESULT_OK);
    }

    /**
     * Renders the image onto a bitmap matching the watch display size and sends
     * it to the host application as JPEG display data.
     *
     * @param data raw image bytes to decode
     * @param x x coordinate (used only in non-scale mode)
     * @param y y coordinate (used only in non-scale mode)
     * @param mode draw mode: null/empty (draw at x,y), "scales", or "fills"
     * @param size target display dimensions
     * @return true: success / false: error (unknown mode-value)
     */
    private boolean showDisplay(final byte[] data, final double x, final double y,
            final String mode, final DisplaySize size) {
        BitmapFactory.Options options = new BitmapFactory.Options();
        options.inMutable = true;
        // NOTE(review): decodeByteArray returns null for undecodable data, which
        // would NPE in the draw helpers below — confirm callers guarantee valid images.
        Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length, options);
        // Off-screen bitmap sized to the watch display (SW1 or SW2).
        final int width = size.getWidth();
        final int height = size.getHeight();
        Bitmap viewBitmap = Bitmap.createBitmap(width, height, SWConstants.DEFAULT_BITMAP_CONFIG);
        boolean isDraw = false;
        if (mode == null || mode.equals("")) {
            // Default mode: draw at the requested coordinates without scaling.
            CanvasProfileUtils.drawImageForNonScalesMode(viewBitmap, bitmap, x, y);
            isDraw = true;
        } else if (mode.equals(Mode.SCALES.getValue())) {
            CanvasProfileUtils.drawImageForScalesMode(viewBitmap, bitmap);
            isDraw = true;
        } else if (mode.equals(Mode.FILLS.getValue())) {
            CanvasProfileUtils.drawImageForFillsMode(viewBitmap, bitmap);
            isDraw = true;
        }
        if (isDraw) {
            // Encode as JPEG and push to the host app via the Control intent.
            ByteArrayOutputStream outputStream =
                new ByteArrayOutputStream(SWConstants.OUTPUTSTREAM_SIZE);
            viewBitmap.compress(CompressFormat.JPEG, SWConstants.BITMAP_DECODE_QUALITY,
                outputStream);
            Intent intent = new Intent(Control.Intents.CONTROL_DISPLAY_DATA_INTENT);
            intent.putExtra(Control.Intents.EXTRA_DATA, outputStream.toByteArray());
            sendToHostApp(intent);
            return true;
        } else {
            return false;
        }
    }

    /**
     * Determines the display size for the connected watch model.
     *
     * @param context application context used to resolve dimension resources
     * @param hostAppPackageName host application package (identifies SW1 or SW2)
     * @return the display dimensions for the detected model
     */
    private static DisplaySize determineDisplaySize(final Context context,
            final String hostAppPackageName) {
        boolean smartWatch2Supported =
            DeviceInfoHelper.isSmartWatch2ApiAndScreenDetected(context, hostAppPackageName);
        int width;
        int height;
        if (smartWatch2Supported) {
            width = context.getResources().getDimensionPixelSize(R.dimen.smart_watch_2_control_width);
            height = context.getResources().getDimensionPixelSize(R.dimen.smart_watch_2_control_height);
        } else {
            width = context.getResources().getDimensionPixelSize(R.dimen.smart_watch_control_width);
            height = context.getResources().getDimensionPixelSize(R.dimen.smart_watch_control_height);
        }
        return new DisplaySize(width, height);
    }

    // Forwards a control intent to the watch host application.
    private void sendToHostApp(final Intent request) {
        ((SWService) getService()).sendRequest(request);
    }
}
//$HeadURL$
package org.deegree.securityproxy.authorization.wcs;

import static org.deegree.securityproxy.authorization.wcs.WcsRequestAuthorizationManager.AUTHORIZED;
import static org.deegree.securityproxy.authorization.wcs.WcsRequestAuthorizationManager.NOT_AUTHORIZED;
import static org.deegree.securityproxy.commons.WcsOperationType.*;
import static org.deegree.securityproxy.commons.WcsServiceVersion.VERSION_100;
import static org.deegree.securityproxy.commons.WcsServiceVersion.VERSION_200;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.*;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;

import javax.servlet.http.HttpServletRequest;

import org.deegree.securityproxy.authentication.wcs.WcsPermission;
import org.deegree.securityproxy.authorization.logging.AuthorizationReport;
import org.deegree.securityproxy.commons.WcsOperationType;
import org.deegree.securityproxy.commons.WcsServiceVersion;
import org.deegree.securityproxy.request.WcsRequest;
import org.junit.Test;
import org.springframework.security.core.Authentication;

/**
 * Tests for {@code WcsRequestAuthorizationManager}: verifies which request types are
 * supported and that authorization decisions respect operation type, service version
 * and coverage name of the granted permissions.
 */
public class WcsRequestAuthorizationManagerTest {

    private static final WcsServiceVersion VERSION = VERSION_100;

    private static final WcsOperationType OPERATION_TYPE = GETCOVERAGE;

    private static final String SERVICE_NAME = "serviceName";

    private static final String COVERAGE_NAME = "layerName";

    private static final String INTERNAL_SERVICE_URL = "serviceUrl";

    private RequestAuthorizationManager authorizationManager = new WcsRequestAuthorizationManager();

    @Test
    public void testSupportsWcsRequestShouldBeSupported() throws Exception {
        boolean supported = authorizationManager.supports(WcsRequest.class);
        assertThat(supported, is(true));
    }

    @Test
    public void testSupportsHttpServletRequestShouldBeUnsupported() throws Exception {
        boolean supported = authorizationManager.supports(HttpServletRequest.class);
        assertThat(supported, is(false));
    }

    @Test
    public void testDecideWithSingleAuthorization() throws Exception {
        Authentication auth = mockDefaultAuthentication();
        WcsRequest wcsRequest = mockDefaultRequest();
        AuthorizationReport decision = authorizationManager.decide(auth, wcsRequest);
        assertThat(decision.isAuthorized(), is(AUTHORIZED));
    }

    @Test
    public void testDecideWithMultipleAuthorizations() throws Exception {
        Authentication auth = mockDefaultAuthenticationWithMultiplePermissions();
        WcsRequest wcsRequest = mockGetCapabilitiesRequest();
        AuthorizationReport decision = authorizationManager.decide(auth, wcsRequest);
        assertThat(decision.isAuthorized(), is(AUTHORIZED));
    }

    @Test
    public void testDecideMultipleAuthorizationsShouldBeRefusedCauseOfVersion() throws Exception {
        Authentication auth = mockDefaultAuthenticationWithMultiplePermissions();
        WcsRequest wcsRequest = mockGetCapabilitiesRequestWithUnsupportedVersion();
        AuthorizationReport decision = authorizationManager.decide(auth, wcsRequest);
        assertThat(decision.isAuthorized(), is(NOT_AUTHORIZED));
        assertThat(decision.getMessage(), is(WcsRequestAuthorizationManager.GETCAPABILITIES_UNAUTHORIZED_MSG));
    }

    @Test
    public void testDecideSingleAuthorizationShouldBeRefusedCauseOfVersion() throws Exception {
        Authentication auth = mockDefaultAuthentication();
        WcsRequest wcsRequest = mockRequestWithUnsupportedVersion();
        AuthorizationReport decision = authorizationManager.decide(auth, wcsRequest);
        assertThat(decision.isAuthorized(), is(NOT_AUTHORIZED));
        assertThat(decision.getMessage(), is(WcsRequestAuthorizationManager.GETCOVERAGE_UNAUTHORIZED_MSG));
    }

    @Test
    public void testDecideSingleAuthorizationShouldBeRefusedCauseOfOperationType() throws Exception {
        Authentication auth = mockDefaultAuthentication();
        WcsRequest wcsRequest = mockRequestWithUnsupportedOperationType();
        AuthorizationReport decision = authorizationManager.decide(auth, wcsRequest);
        assertThat(decision.isAuthorized(), is(NOT_AUTHORIZED));
    }

    @Test
    public void testDecideSingleAuthorizationShouldBeRefusedBecauseOfCovName() throws Exception {
        Authentication auth = mockDefaultAuthentication();
        WcsRequest wcsRequest = mockRequestWithUnsupportedLayerName();
        AuthorizationReport decision = authorizationManager.decide(auth, wcsRequest);
        assertThat(decision.isAuthorized(), is(NOT_AUTHORIZED));
        assertThat(decision.getMessage(), is(WcsRequestAuthorizationManager.GETCOVERAGE_UNAUTHORIZED_MSG));
    }

    // Request matching the default permission exactly.
    private WcsRequest mockDefaultRequest() {
        return mockRequest(COVERAGE_NAME, OPERATION_TYPE, SERVICE_NAME, VERSION);
    }

    private WcsRequest mockGetCapabilitiesRequest() {
        return mockRequest(null, GETCAPABILITIES, SERVICE_NAME, VERSION);
    }

    private WcsRequest mockGetCapabilitiesRequestWithUnsupportedVersion() {
        return mockRequest(null, GETCAPABILITIES, SERVICE_NAME, VERSION_200);
    }

    private WcsRequest mockRequestWithUnsupportedVersion() {
        return mockRequest(COVERAGE_NAME, OPERATION_TYPE, SERVICE_NAME, VERSION_200);
    }

    private WcsRequest mockRequestWithUnsupportedOperationType() {
        return mockRequest(COVERAGE_NAME, DESCRIBECOVERAGE, SERVICE_NAME, VERSION);
    }

    private WcsRequest mockRequestWithUnsupportedLayerName() {
        return mockRequest("unknown", OPERATION_TYPE, SERVICE_NAME, VERSION);
    }

    // Builds a WcsRequest mock with the given coverage/operation/service/version.
    private WcsRequest mockRequest(String layerName, WcsOperationType operationType, String serviceName,
                                   WcsServiceVersion version) {
        WcsRequest wcsRequest = mock(WcsRequest.class);
        when(wcsRequest.getCoverageNames()).thenReturn(Collections.singletonList(layerName));
        when(wcsRequest.getOperationType()).thenReturn(operationType);
        when(wcsRequest.getServiceVersion()).thenReturn(version);
        when(wcsRequest.getServiceName()).thenReturn(serviceName);
        return wcsRequest;
    }

    // Authentication carrying a single GetCoverage permission.
    private Authentication mockDefaultAuthentication() {
        Authentication authentication = mock(Authentication.class);
        Collection<WcsPermission> permissions = new ArrayList<>();
        permissions.add(new WcsPermission(OPERATION_TYPE, VERSION, COVERAGE_NAME, SERVICE_NAME,
                                          INTERNAL_SERVICE_URL));
        doReturn(permissions).when(authentication).getAuthorities();
        return authentication;
    }

    // Authentication carrying a GetCoverage and a GetCapabilities permission.
    private Authentication mockDefaultAuthenticationWithMultiplePermissions() {
        Authentication authentication = mock(Authentication.class);
        Collection<WcsPermission> permissions = new ArrayList<>();
        permissions.add(new WcsPermission(OPERATION_TYPE, VERSION, COVERAGE_NAME, SERVICE_NAME,
                                          INTERNAL_SERVICE_URL));
        permissions.add(new WcsPermission(GETCAPABILITIES, VERSION, null, SERVICE_NAME,
                                          INTERNAL_SERVICE_URL));
        doReturn(permissions).when(authentication).getAuthorities();
        return authentication;
    }
}
package io.quarkus.hibernate.orm.deployment; import java.util.Collections; import java.util.Map; import java.util.Set; import io.quarkus.builder.item.SimpleBuildItem; /** * Internal model to hold the mapping linking a JPA entity to its corresponding persistence units. */ public final class JpaModelPersistenceUnitMappingBuildItem extends SimpleBuildItem { private final Map<String, Set<String>> entityToPersistenceUnits; public JpaModelPersistenceUnitMappingBuildItem(Map<String, Set<String>> entityToPersistenceUnits) { this.entityToPersistenceUnits = Collections.unmodifiableMap(entityToPersistenceUnits); } public Map<String, Set<String>> getEntityToPersistenceUnits() { return entityToPersistenceUnits; } }
package org.innovateuk.ifs.project.bankdetails.transactional; import org.innovateuk.ifs.BaseServiceUnitTest; import org.innovateuk.ifs.address.domain.Address; import org.innovateuk.ifs.address.domain.AddressType; import org.innovateuk.ifs.address.repository.AddressRepository; import org.innovateuk.ifs.address.repository.AddressTypeRepository; import org.innovateuk.ifs.address.resource.AddressResource; import org.innovateuk.ifs.application.domain.Application; import org.innovateuk.ifs.commons.error.Error; import org.innovateuk.ifs.commons.service.ServiceResult; import org.innovateuk.ifs.competition.domain.Competition; import org.innovateuk.ifs.competition.resource.BankDetailsReviewResource; import org.innovateuk.ifs.finance.transactional.FinanceService; import org.innovateuk.ifs.organisation.domain.Organisation; import org.innovateuk.ifs.organisation.domain.OrganisationAddress; import org.innovateuk.ifs.organisation.mapper.OrganisationAddressMapper; import org.innovateuk.ifs.organisation.repository.OrganisationAddressRepository; import org.innovateuk.ifs.organisation.repository.OrganisationRepository; import org.innovateuk.ifs.organisation.resource.OrganisationAddressResource; import org.innovateuk.ifs.organisation.resource.OrganisationTypeEnum; import org.innovateuk.ifs.project.bankdetails.builder.BankDetailsBuilder; import org.innovateuk.ifs.project.bankdetails.domain.BankDetails; import org.innovateuk.ifs.project.bankdetails.mapper.BankDetailsMapper; import org.innovateuk.ifs.project.bankdetails.mapper.SILBankDetailsMapper; import org.innovateuk.ifs.project.bankdetails.repository.BankDetailsRepository; import org.innovateuk.ifs.project.bankdetails.resource.BankDetailsResource; import org.innovateuk.ifs.project.bankdetails.resource.BankDetailsStatusResource; import org.innovateuk.ifs.project.bankdetails.resource.ProjectBankDetailsStatusSummary; import org.innovateuk.ifs.project.constant.ProjectActivityStates; import org.innovateuk.ifs.project.core.domain.Project; 
import org.innovateuk.ifs.project.core.repository.ProjectRepository; import org.innovateuk.ifs.project.core.util.ProjectUsersHelper; import org.innovateuk.ifs.project.projectdetails.workflow.configuration.ProjectDetailsWorkflowHandler; import org.innovateuk.ifs.project.resource.ProjectOrganisationCompositeId; import org.innovateuk.ifs.sil.experian.resource.*; import org.innovateuk.ifs.sil.experian.service.SilExperianEndpoint; import org.innovateuk.ifs.user.domain.ProcessRole; import org.innovateuk.ifs.user.resource.Role; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; import java.util.List; import java.util.Optional; import static java.util.Arrays.asList; import static java.util.Collections.singletonList; import static org.innovateuk.ifs.address.builder.AddressBuilder.newAddress; import static org.innovateuk.ifs.address.builder.AddressResourceBuilder.newAddressResource; import static org.innovateuk.ifs.address.resource.OrganisationAddressType.BANK_DETAILS; import static org.innovateuk.ifs.application.builder.ApplicationBuilder.newApplication; import static org.innovateuk.ifs.commons.error.CommonFailureKeys.*; import static org.innovateuk.ifs.commons.service.ServiceResult.serviceSuccess; import static org.innovateuk.ifs.competition.builder.CompetitionBuilder.newCompetition; import static org.innovateuk.ifs.organisation.builder.OrganisationAddressBuilder.newOrganisationAddress; import static org.innovateuk.ifs.organisation.builder.OrganisationBuilder.newOrganisation; import static org.innovateuk.ifs.organisation.builder.OrganisationTypeBuilder.newOrganisationType; import static org.innovateuk.ifs.project.bankdetails.builder.BankDetailsResourceBuilder.newBankDetailsResource; import static org.innovateuk.ifs.project.bankdetails.builder.BankDetailsStatusResourceBuilder.newBankDetailsStatusResource; import static org.innovateuk.ifs.project.bankdetails.builder.ProjectBankDetailsStatusSummaryBuilder.newProjectBankDetailsStatusSummary; import static 
org.innovateuk.ifs.project.core.builder.ProjectBuilder.newProject; import static org.innovateuk.ifs.project.resource.ProjectState.*; import static org.innovateuk.ifs.user.builder.ProcessRoleBuilder.newProcessRole; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.*; import static org.springframework.http.HttpStatus.NOT_FOUND; public class BankDetailsServiceImplTest extends BaseServiceUnitTest<BankDetailsService> { private BankDetailsResource bankDetailsResource; private Project project; private Organisation organisation; private BankDetails bankDetails; private SILBankDetails silBankDetails; private AccountDetails accountDetails; private SILBankDetailsMapper silBankDetailsMapper = new SILBankDetailsMapper(); @Mock private BankDetailsRepository bankDetailsRepositoryMock; @Mock private BankDetailsMapper bankDetailsMapperMock; @Mock private AddressRepository addressRepositoryMock; @Mock private ProjectRepository projectRepositoryMock; @Mock private OrganisationAddressRepository organisationAddressRepositoryMock; @Mock private SilExperianEndpoint silExperianEndpointMock; @Mock private ProjectDetailsWorkflowHandler projectDetailsWorkflowHandlerMock; @Mock private ProjectUsersHelper projectUsersHelperMock; @Mock private FinanceService financeServiceMock; @Mock private OrganisationRepository organisationRepositoryMock; @Mock private AddressTypeRepository addressTypeRepository; @Mock private OrganisationAddressMapper organisationAddressMapper; @Before public void setUp() { organisation = newOrganisation().build(); project = newProject().build(); AddressResource addressResource = newAddressResource().build(); Address address = newAddress().build(); OrganisationAddress organisationAddress = newOrganisationAddress().build(); bankDetailsResource = newBankDetailsResource() .withProject(project.getId()) .withSortCode("123123") .withAccountNumber("12345678") .withOrganisation(organisation.getId()) 
.withAddress(addressResource) .build(); bankDetails = BankDetailsBuilder.newBankDetails() .withSortCode(bankDetailsResource.getSortCode()) .withAccountNumber(bankDetailsResource.getAccountNumber()) .withOrganisation(organisation) .withOrganiationAddress(organisationAddress) .build(); accountDetails = silBankDetailsMapper.toAccountDetails(bankDetailsResource); silBankDetails = silBankDetailsMapper.toSILBankDetails(bankDetailsResource); when(bankDetailsMapperMock.mapToDomain(bankDetailsResource)).thenReturn(bankDetails); when(addressRepositoryMock.findById(addressResource.getId())).thenReturn(Optional.of(address)); when(bankDetailsRepositoryMock.save(bankDetails)).thenReturn(bankDetails); when(projectRepositoryMock.findById(bankDetailsResource.getProject())).thenReturn(Optional.of(project)); when(addressTypeRepository.findById(BANK_DETAILS.getOrdinal())).thenReturn(Optional.of(new AddressType())); when(organisationAddressMapper.mapToDomain(any(OrganisationAddressResource.class))).thenReturn(organisationAddress); } @Test public void getBankDetailsByProjectAndOrganisation() { when(bankDetailsRepositoryMock.findByProjectIdAndOrganisationId(project.getId(), organisation.getId())).thenReturn(bankDetails); when(bankDetailsMapperMock.mapToResource(bankDetails)).thenReturn(bankDetailsResource); ServiceResult<BankDetailsResource> result = service.getByProjectAndOrganisation(project.getId(), organisation.getId()); assertTrue(result.isSuccess()); assertEquals(result.getSuccess(), bankDetailsResource); } @Test public void getBankDetailsByProjectAndOrganisationButTheyDontExist() { when(bankDetailsRepositoryMock.findByProjectIdAndOrganisationId(project.getId(), organisation.getId())).thenReturn(null); ServiceResult<BankDetailsResource> result = service.getByProjectAndOrganisation(project.getId(), organisation.getId()); assertTrue(result.isFailure()); Error expectedError = new Error(BANK_DETAILS_DONT_EXIST_FOR_GIVEN_PROJECT_AND_ORGANISATION, asList(project.getId(), 
organisation.getId()), NOT_FOUND); assertTrue(result.getFailure().is(expectedError)); } @Test public void saveValidBankDetails() { ValidationResult validationResult = new ValidationResult(); validationResult.setCheckPassed(true); when(silExperianEndpointMock.validate(silBankDetails)).thenReturn(serviceSuccess(validationResult)); VerificationResult verificationResult = new VerificationResult(); when(silExperianEndpointMock.verify(accountDetails)).thenReturn(serviceSuccess(verificationResult)); when(bankDetailsRepositoryMock.findByProjectIdAndOrganisationId(bankDetailsResource.getProject(), bankDetailsResource.getOrganisation())).thenReturn(null, bankDetails); when(projectDetailsWorkflowHandlerMock.isSubmitted(project)).thenReturn(true); ServiceResult<Void> result = service.submitBankDetails(new ProjectOrganisationCompositeId(project.getId(), organisation.getId()),bankDetailsResource); assertTrue(result.isSuccess()); } @Test public void bankDetailsCanBeSubmittedBeforeProjectDetails() { ValidationResult validationResult = new ValidationResult(); VerificationResult verificationResult = new VerificationResult(); validationResult.setCheckPassed(true); when(projectDetailsWorkflowHandlerMock.isSubmitted(project)).thenReturn(false); when(bankDetailsMapperMock.mapToDomain(bankDetailsResource)).thenReturn(bankDetails); when(silExperianEndpointMock.validate(silBankDetails)).thenReturn(serviceSuccess(validationResult)); when(silExperianEndpointMock.verify(accountDetails)).thenReturn(serviceSuccess(verificationResult)); when(bankDetailsRepositoryMock.findByProjectIdAndOrganisationId(bankDetailsResource.getProject(), bankDetailsResource.getOrganisation())).thenReturn(null, bankDetails); ServiceResult<Void> result = service.submitBankDetails(new ProjectOrganisationCompositeId(project.getId(), organisation.getId()),bankDetailsResource); assertTrue(result.isSuccess()); verify(bankDetailsRepositoryMock, times(2)).save(bankDetails); } @Test public void 
bankDetailsAreNotSavedIfExperianValidationFails() { ValidationResult validationResult = new ValidationResult(); Condition condition = new Condition(); condition.setSeverity("error"); condition.setDescription("Invalid sort code"); condition.setCode(5); validationResult.setConditions(singletonList(condition)); validationResult.setCheckPassed(false); when(silExperianEndpointMock.validate(silBankDetails)).thenReturn(serviceSuccess(validationResult)); when(projectDetailsWorkflowHandlerMock.isSubmitted(project)).thenReturn(true); service.submitBankDetails(new ProjectOrganisationCompositeId(project.getId(), organisation.getId()), bankDetailsResource); verify(silExperianEndpointMock, never()).verify(accountDetails); verify(bankDetailsRepositoryMock, times(1)).findByProjectIdAndOrganisationId(bankDetailsResource.getProject(), bankDetailsResource.getOrganisation()); } @Test public void testVerificationOccursOnceBankDetailsAreSaved() { ValidationResult validationResult = new ValidationResult(); validationResult.setCheckPassed(true); VerificationResult verificationResult = new VerificationResult(); when(silExperianEndpointMock.validate(silBankDetails)).thenReturn(serviceSuccess(validationResult)); when(silExperianEndpointMock.verify(accountDetails)).thenReturn(serviceSuccess(verificationResult)); when(bankDetailsRepositoryMock.findByProjectIdAndOrganisationId(bankDetailsResource.getProject(), bankDetailsResource.getOrganisation())).thenReturn(null, bankDetails); when(projectDetailsWorkflowHandlerMock.isSubmitted(project)).thenReturn(true); service.submitBankDetails(new ProjectOrganisationCompositeId(project.getId(), organisation.getId()),bankDetailsResource); verify(silExperianEndpointMock, times(1)).verify(accountDetails); verify(bankDetailsRepositoryMock, times(2)).findByProjectIdAndOrganisationId(bankDetailsResource.getProject(), bankDetailsResource.getOrganisation()); } @Test public void updateOfBankDetailsWithExistingBankDetailsPresent() { 
when(bankDetailsRepositoryMock.findByProjectIdAndOrganisationId(bankDetailsResource.getProject(), bankDetailsResource.getOrganisation())).thenReturn(bankDetails); when(projectDetailsWorkflowHandlerMock.isSubmitted(project)).thenReturn(true); ServiceResult<Void> result = service.updateBankDetails(new ProjectOrganisationCompositeId(project.getId(), organisation.getId()),bankDetailsResource); assertTrue(result.isSuccess()); } @Test public void updateOfBankDetailsWithProjectDetailsNotSubmited() { when(bankDetailsRepositoryMock.findByProjectIdAndOrganisationId(bankDetailsResource.getProject(), bankDetailsResource.getOrganisation())).thenReturn(bankDetails); when(projectDetailsWorkflowHandlerMock.isSubmitted(project)).thenReturn(false); ServiceResult<Void> result = service.updateBankDetails(new ProjectOrganisationCompositeId(project.getId(), organisation.getId()),bankDetailsResource); assertTrue(result.isSuccess()); verify(bankDetailsRepositoryMock).save(bankDetails); } @Test public void updateOfBankDetailsWithExistingBankDetailsNotPresent() { when(bankDetailsRepositoryMock.findByProjectIdAndOrganisationId(bankDetailsResource.getProject(), bankDetailsResource.getOrganisation())).thenReturn(null); when(projectDetailsWorkflowHandlerMock.isSubmitted(project)).thenReturn(true); ServiceResult<Void> result = service.updateBankDetails(new ProjectOrganisationCompositeId(project.getId(), organisation.getId()),bankDetailsResource); assertTrue(result.isFailure()); assertTrue(result.getFailure().is(BANK_DETAILS_CANNOT_BE_UPDATED_BEFORE_BEING_SUBMITTED)); } @Test public void updateOfBankDetailsWileApreadyApprovedNotAllowed() { bankDetailsResource.setManualApproval(true); bankDetails.setManualApproval(true); when(bankDetailsRepositoryMock.findByProjectIdAndOrganisationId(bankDetailsResource.getProject(), bankDetailsResource.getOrganisation())).thenReturn(bankDetails); when(projectDetailsWorkflowHandlerMock.isSubmitted(project)).thenReturn(true); ServiceResult<Void> result = 
service.updateBankDetails(new ProjectOrganisationCompositeId(project.getId(), organisation.getId()),bankDetailsResource); assertTrue(result.isFailure()); assertTrue(result.getFailure().is(BANK_DETAILS_HAVE_ALREADY_BEEN_APPROVED_AND_CANNOT_BE_UPDATED)); } @Test public void getProjectBankDetailsStatusSummary() { Long projectId = 123L; Competition competition = newCompetition().withName("Greener Jet Engines").build(); Application application = newApplication().withCompetition(competition).build(); organisation.setOrganisationType(newOrganisationType().withOrganisationType(OrganisationTypeEnum.BUSINESS).build()); ProcessRole leadApplicantRole = newProcessRole().withRole(Role.LEADAPPLICANT).withOrganisationId(organisation.getId()).withApplication(application).build(); Project project = newProject().withId(projectId).withApplication(application).build(); when(projectRepositoryMock.findById(projectId)).thenReturn(Optional.of(project)); when(bankDetailsRepositoryMock.findByProjectIdAndOrganisationId(projectId, organisation.getId())).thenReturn(bankDetails); when(bankDetailsMapperMock.mapToResource(bankDetails)).thenReturn(bankDetailsResource); when(projectUsersHelperMock.getPartnerOrganisations(projectId)).thenReturn(singletonList(organisation)); when(financeServiceMock.organisationSeeksFunding(project.getApplication().getId(), organisation.getId())).thenReturn(serviceSuccess(true)); when(organisationRepositoryMock.findById(leadApplicantRole.getOrganisationId())).thenReturn(Optional.of(organisation)); List<BankDetailsStatusResource> bankDetailsStatusResource = newBankDetailsStatusResource() .withOrganisationId(organisation.getId()) .withOrganisationName(organisation.getName()) .withBankDetailsStatus(ProjectActivityStates.ACTION_REQUIRED) .build(1); ProjectBankDetailsStatusSummary expected = newProjectBankDetailsStatusSummary().build(); expected.setProjectId(projectId); expected.setApplicationId(application.getId()); expected.setCompetitionId(competition.getId()); 
expected.setCompetitionName(competition.getName()); expected.setBankDetailsStatusResources(bankDetailsStatusResource); ServiceResult<ProjectBankDetailsStatusSummary> result = service.getProjectBankDetailsStatusSummary(projectId); assertTrue(result.isSuccess()); assertEquals(expected, result.getSuccess()); } @Test public void getPendingBankDetailsApprovals() { List<BankDetailsReviewResource> pendingBankDetails = singletonList(new BankDetailsReviewResource( 1L, 11L, "Comp1", 12L, "project1", 22L, "Org1")); when(bankDetailsRepositoryMock.getPendingBankDetailsApprovalsForProjectStateNotIn(asList(WITHDRAWN, HANDLED_OFFLINE, COMPLETED_OFFLINE))).thenReturn(pendingBankDetails); ServiceResult<List<BankDetailsReviewResource>> result = service.getPendingBankDetailsApprovals(); assertTrue(result.isSuccess()); assertEquals(pendingBankDetails, result.getSuccess()); } @Test public void countPendingBankDetailsApprovals() { Long pendingBankDetailsCount = 8L; when(bankDetailsRepositoryMock.countPendingBankDetailsApprovalsForProjectStateNotIn(asList(WITHDRAWN, HANDLED_OFFLINE, COMPLETED_OFFLINE))).thenReturn(pendingBankDetailsCount); ServiceResult<Long> result = service.countPendingBankDetailsApprovals(); assertTrue(result.isSuccess()); assertEquals(pendingBankDetailsCount, result.getSuccess()); } @Override protected BankDetailsService supplyServiceUnderTest() { return new BankDetailsServiceImpl(); } }
package org.phenotips.matchingnotification.notification.internal;

import org.phenotips.matchingnotification.match.PatientInMatch;
import org.phenotips.matchingnotification.match.PatientMatch;

import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import javax.mail.Address;
import javax.mail.Message.RecipientType;
import javax.mail.internet.InternetAddress;

import org.apache.commons.lang3.StringUtils;

import com.xpn.xwiki.XWikiContext;

/**
 * An email that is supposedly sent by the current user to notify about a match involving the
 * user's patient. The recipient is the owner(s) of the other patient in the match, and may
 * be either local or remote.
 *
 * @version $Id$
 */
public class DefaultUserPatientMatchEmail extends AbstractPatientMatchEmail
{
    /** Name of document containing template for email notification. */
    public static final String EMAIL_TEMPLATE = "UserMatchNotificationEmailTemplate";

    /** Name of the XWiki User Document field that contains user email. */
    public static final String USER_PROPERTY_EMAIL = "email";

    /** The "other" patient in the match, i.e. the one that is NOT the subject of this email. */
    private PatientInMatch myPatient;

    /**
     * Build a new email object for the given match. One of the patients in the match should
     * have same id and server id as {@code subjectPatientId}.
     *
     * @param match the match that the email notifies of
     * @param subjectPatientId id of patient who is the subject of this email.
     *            Owner of this patient will be notified
     * @param subjectServerId id of the server that holds the subjectPatientId
     */
    public DefaultUserPatientMatchEmail(PatientMatch match, String subjectPatientId, String subjectServerId)
    {
        super(subjectPatientId, subjectServerId, Collections.singletonList(match), null, null);
    }

    /**
     * Same as above, but allows using custom email texts.
     *
     * @param match the match that the email notifies of
     * @param subjectPatientId id of patient who is the subject of this email.
     *            Owner of this patient will be notified
     * @param subjectServerId id of the server that holds the subjectPatientId
     * @param customEmailText (optional) custom text to be used for the email
     * @param customEmailSubject (optional) custom subject to be used for the email
     */
    public DefaultUserPatientMatchEmail(PatientMatch match, String subjectPatientId, String subjectServerId,
        String customEmailText, String customEmailSubject)
    {
        super(subjectPatientId, subjectServerId, Collections.singletonList(match), customEmailText,
            customEmailSubject);
    }

    @Override
    protected void init(String subjectPatientId, String subjectServerId)
    {
        super.init(subjectPatientId, subjectServerId);
        PatientMatch match = this.matches.iterator().next();
        // Identity comparison is intentional here: the subject patient object is one of the two
        // PatientInMatch instances held by the match itself, so "my" patient is the other one
        if (match.getReference() == this.subjectPatient) {
            this.myPatient = match.getMatched();
        } else {
            this.myPatient = match.getReference();
        }
    }

    @Override
    protected String getEmailTemplate()
    {
        return EMAIL_TEMPLATE;
    }

    @Override
    protected Map<String, Object> createVelocityVariablesMap()
    {
        // TODO: use the same set of variables that DefaultAdminPatientMatchEmail uses
        //       to be able to draw a nice HTML table with matches
        Map<String, Object> velocityVariables = new HashMap<>();
        velocityVariables.put("subjectPatient", this.subjectPatient);

        if (this.subjectPatient.getServerId() == null) {
            // current user may have no access to the other patient, so velocity can't be used
            // to get the external URl of the other patient
            try {
                XWikiContext context = CONTEXT_PROVIDER.get();
                String linkURL = context.getWiki().getDocument(
                    this.subjectPatient.getPatient().getDocumentReference(), context).getExternalURL("view", context);
                velocityVariables.put("subjectPatientLink", linkURL);
            } catch (Exception ex) {
                // fall back to the bare patient id when the external URL can't be computed
                velocityVariables.put("subjectPatientLink", this.subjectPatient.getPatient().getId());
            }
        }

        velocityVariables.put("myPatient", this.myPatient);
        return velocityVariables;
    }

    @Override
    protected void setTo()
    {
        super.setTo();
        // Best effort: CC the current user on the notification; a failure to resolve or add the
        // user's own address must not prevent the email from being sent to the actual recipient
        try {
            Set<Address> userEmails = getUserEmails();
            for (Address email : userEmails) {
                if (email != null) {
                    this.mimeMessage.addRecipient(RecipientType.CC, email);
                }
            }
        } catch (Exception ignored) {
            // do nothing
        }
    }

    @Override
    protected void setFrom()
    {
        super.setFrom();
        // Best effort: replies should go to the current user rather than the system sender
        try {
            Set<Address> userEmails = getUserEmails();
            if (userEmails != null) {
                Address[] address = new Address[userEmails.size()];
                this.mimeMessage.setReplyTo(userEmails.toArray(address));
            }
        } catch (Exception ignored) {
            // do nothing
        }
    }

    /**
     * Collects the email address(es) of the current user. The user profile may hold several
     * addresses separated by {@code ,}, {@code |} or {@code ;}; each entry is parsed separately
     * and invalid entries are skipped (and logged).
     *
     * @return a (possibly empty) set of parsed addresses; never {@code null}
     */
    private Set<Address> getUserEmails()
    {
        Set<Address> emails = new HashSet<>();
        // Guard against a missing current user or a profile without an email attribute: the
        // previous code called getAttribute(...).toString() unconditionally and threw an NPE
        // in that case (and StringUtils.split(null, ...) would NPE the loop below)
        Object emailAttribute = (USERMANAGER.getCurrentUser() == null)
            ? null : USERMANAGER.getCurrentUser().getAttribute(USER_PROPERTY_EMAIL);
        if (emailAttribute == null) {
            return emails;
        }
        String userEmail = emailAttribute.toString();
        for (String parsedEmail : StringUtils.split(userEmail, ",|;")) {
            if (StringUtils.isNotBlank(parsedEmail)) {
                try {
                    InternetAddress email = new InternetAddress(parsedEmail.trim());
                    emails.add(email);
                } catch (Exception ex) {
                    LOGGER.error("Error parsing email [{}]: {}", parsedEmail, ex.getMessage(), ex);
                }
            }
        }
        return emails;
    }
}
package org.jboss.wsf.stack.cxf.addons.transports.httpserver;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.concurrent.Executors;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.apache.cxf.Bus;
import org.apache.cxf.common.logging.LogUtils;
import org.jboss.ws.httpserver_httpspi.PathUtils;
import org.jboss.com.sun.net.httpserver.HttpHandler;
import org.jboss.com.sun.net.httpserver.HttpServer;

/**
 * A server engine that internally uses the JDK6 httpserver
 *
 * @author alessio.soldano@jboss.com
 * @since 19-Aug-2010
 *
 */
public class HttpServerEngine
{
   private static final Logger LOG = LogUtils.getL7dLogger(HttpServerEngine.class);

   // Seconds to wait for in-flight exchanges when stopping the server; configurable through the
   // "<HttpServerEngineFactory class name>.STOP_DELAY" system property.
   // NOTE: Integer.getInteger() performs the system-property lookup itself and must be given the
   // property NAME; the previous code passed System.getProperty(...)'s VALUE as the name, which
   // always resolved to the default.
   private static final int DELAY = Integer.getInteger(HttpServerEngineFactory.class.getName() + ".STOP_DELAY", 1);

   /** Socket backlog passed to HttpServer.create(); 0 selects the system default. */
   private static final int BACKLOG = 0;

   private Bus bus;

   private HttpServerEngineFactory factory;

   private String host;

   private int port;

   /** Number of handlers currently registered; shutdown() only destroys the engine when this is 0. */
   private int handlerCount;

   private String protocol = "http";

   private HttpServer server;

   public HttpServerEngine(HttpServerEngineFactory fac, Bus bus, String host, int port)
   {
      this.bus = bus;
      this.factory = fac;
      this.host = host;
      this.port = port;
   }

   public Bus getBus()
   {
      return bus;
   }

   public String getProtocol()
   {
      return protocol;
   }

   public int getPort()
   {
      return port;
   }

   public String getHost()
   {
      return host;
   }

   /**
    * Registers a handler for the given endpoint address, lazily creating and starting the
    * underlying httpserver on the first registration.
    *
    * @param address the endpoint address to serve
    * @param handler the handler processing requests for the address
    */
   public synchronized void addHandler(String address, HttpHandler handler)
   {
      if (server == null) //start the server on first call
      {
         InetSocketAddress isa = host != null ? new InetSocketAddress(host, port) : new InetSocketAddress(port);
         try
         {
            server = HttpServer.create(isa, BACKLOG);
            server.setExecutor(Executors.newCachedThreadPool());
            server.start();
         }
         catch (IOException e)
         {
            throw new RuntimeException(e);
         }
      }
      server.createContext(PathUtils.getContextPath(address) + PathUtils.getPath(address), handler);
      handlerCount++;
   }

   /**
    * Unregisters the handler previously added for the given endpoint address.
    *
    * @param address the endpoint address whose context is removed
    */
   public synchronized void removeHandler(String address)
   {
      server.removeContext(PathUtils.getContextPath(address) + PathUtils.getPath(address));
      // Fixed: the decrement statement was truncated to a bare "handlerCount"; without it the
      // counter never reached 0 again and shutdown() could never destroy the engine
      handlerCount--;
   }

   /**
    * This method is called by the ServerEngine Factory to destroy the server
    */
   protected void stop() throws Exception
   {
      if (server != null)
      {
         server.stop(DELAY);
      }
   }

   /**
    * This method will shut down the server engine and
    * remove it from the factory's cache.
    */
   public void shutdown()
   {
      if (factory != null && handlerCount == 0)
      {
         factory.destroyForPort(port);
      }
      else
      {
         LOG.log(Level.WARNING, "FAILED_TO_SHUTDOWN_ENGINE_MSG", port);
      }
   }
}
package beast.evolution.tree; import java.util.*; import beast.core.Description; import beast.core.Input; import beast.core.StateNode; import beast.core.StateNodeInitialiser; import beast.core.util.Log; import beast.core.BEASTInterface; import beast.core.Input.Validate; import beast.evolution.alignment.Alignment; import beast.evolution.alignment.TaxonSet; import beast.evolution.tree.coalescent.PopulationFunction; import beast.math.distributions.MRCAPrior; import beast.math.distributions.ParametricDistribution; import beast.util.HeapSort; import beast.util.Randomizer; @Description("This class provides the basic engine for coalescent simulation of a given demographic model over a given time period. ") public class RandomTree extends Tree implements StateNodeInitialiser { public Input<Alignment> taxaInput = new Input<>("taxa", "set of taxa to initialise tree specified by alignment"); public Input<PopulationFunction> populationFunctionInput = new Input<PopulationFunction>("populationModel", "population function for generating coalescent???", Validate.REQUIRED); public Input<List<MRCAPrior>> calibrationsInput = new Input<List<MRCAPrior>>("constraint", "specifies (monophyletic or height distribution) constraints on internal nodes", new ArrayList<MRCAPrior>()); public Input<Double> rootHeightInput = new Input<Double>("rootHeight", "If specified the tree will be scaled to match the root height, if constraints allow this"); // total nr of taxa int nrOfTaxa; class Bound { Double upper = Double.POSITIVE_INFINITY; Double lower = Double.NEGATIVE_INFINITY; public String toString() { return "[" + lower + "," + upper + "]"; } } // Location of last monophyletic clade in the lists below, which are grouped together at the start. // (i.e. the first isMonophyletic of the TaxonSets are monophyletic, while the remainder are not). int lastMonophyletic; // taxonSets,distributions, m_bounds and taxonSetIDs are indexed together (four values associated with this clade, a set of taxa. 
// taxon sets of clades that has a constraint of calibrations. Monophyletic constraints may be nested, and are sorted by the code to be at a // higher index, i.e iterating from zero up does post-order (descendants before parent). List<Set<String>> taxonSets; // list of parametric distribution constraining the MRCA of taxon sets, null if not present List<ParametricDistribution> distributions; // hard bound for the set, if any List<Bound> m_bounds; // The prior element involved, if any List<String> taxonSetIDs; List<Integer>[] children; Set<String> sTaxa; // number of the next internal node, used when creating new internal nodes int nextNodeNr; // used to indicate one of the MRCA constraints could not be met protected class ConstraintViolatedException extends Exception { private static final long serialVersionUID = 1L; } @Override public void initAndValidate() throws Exception { sTaxa = new LinkedHashSet<>(); if (taxaInput.get() != null) { sTaxa.addAll(taxaInput.get().getTaxaNames()); } else { sTaxa.addAll(m_taxonset.get().asStringList()); } nrOfTaxa = sTaxa.size(); initStateNodes(); super.initAndValidate(); } @SuppressWarnings({"rawtypes", "unchecked"}) private void swap(final List list, final int i, final int j) { final Object tmp = list.get(i); list.set(i, list.get(j)); list.set(j, tmp); } // taxonset intersection test // private boolean intersects(final BitSet bitSet, final BitSet bitSet2) { // for (int k = bitSet.nextSetBit(0); k >= 0; k = bitSet.nextSetBit(k + 1)) { // if (bitSet2.get(k)) { // return true; // return false; // returns true if bitSet is a subset of bitSet2 // private boolean isSubset(final BitSet bitSet, final BitSet bitSet2) { // boolean bIsSubset = true; // for (int k = bitSet.nextSetBit(0); bIsSubset && k >= 0; k = bitSet.nextSetBit(k + 1)) { // bIsSubset = bitSet2.get(k); // return bIsSubset; //@Override public void initStateNodes() throws Exception { // find taxon sets we are dealing with taxonSets = new ArrayList<>(); m_bounds = new 
ArrayList<>(); distributions = new ArrayList<>(); taxonSetIDs = new ArrayList<>(); lastMonophyletic = 0; if (taxaInput.get() != null) { sTaxa.addAll(taxaInput.get().getTaxaNames()); } else { sTaxa.addAll(m_taxonset.get().asStringList()); } // pick up constraints from outputs, m_inititial input tree and output tree, if any List<MRCAPrior> calibrations = new ArrayList<MRCAPrior>(); calibrations.addAll(calibrationsInput.get()); // for (Plugin plugin : outputs) { // // pick up constraints in outputs // if (plugin instanceof MRCAPrior && !calibrations.contains(plugin)) { // calibrations.add((MRCAPrior) plugin); // } else if (plugin instanceof Tree) { // // pick up constraints in outputs if output tree // Tree tree = (Tree) plugin; // if (tree.m_initial.get() == this) { // for (Plugin plugin2 : tree.outputs) { // if (plugin2 instanceof MRCAPrior && !calibrations.contains(plugin2)) { // calibrations.add((MRCAPrior) plugin2); // pick up constraints in m_initial tree for (final Object plugin : getOutputs()) { if (plugin instanceof MRCAPrior && !calibrations.contains(plugin) ) { calibrations.add((MRCAPrior) plugin); } } if (m_initial.get() != null) { for (final Object plugin : m_initial.get().getOutputs()) { if (plugin instanceof MRCAPrior && !calibrations.contains(plugin)) { calibrations.add((MRCAPrior) plugin); } } } for (final MRCAPrior prior : calibrations) { final TaxonSet taxonSet = prior.taxonsetInput.get(); if (taxonSet != null && !prior.onlyUseTipsInput.get()) { final Set<String> bTaxa = new HashSet<>(); if (taxonSet.asStringList() == null) { taxonSet.initAndValidate(); } for (final String sTaxonID : taxonSet.asStringList()) { if (!sTaxa.contains(sTaxonID)) { throw new Exception("Taxon <" + sTaxonID + "> could not be found in list of taxa. 
Choose one of " + sTaxa.toArray(new String[0])); } bTaxa.add(sTaxonID); } final ParametricDistribution distr = prior.distInput.get(); final Bound bounds = new Bound(); if (distr != null) { List<BEASTInterface> plugins = new ArrayList<BEASTInterface>(); distr.getPredecessors(plugins); for (int i = plugins.size() - 1; i >= 0 ; i plugins.get(i).initAndValidate(); } bounds.lower = distr.inverseCumulativeProbability(0.0) + distr.offsetInput.get(); bounds.upper = distr.inverseCumulativeProbability(1.0) + distr.offsetInput.get(); } if (prior.isMonophyleticInput.get()) { // add any monophyletic constraint taxonSets.add(lastMonophyletic, bTaxa); distributions.add(lastMonophyletic, distr); m_bounds.add(lastMonophyletic, bounds); taxonSetIDs.add(prior.getID()); lastMonophyletic++; } else { // only calibrations with finite bounds are added if (!Double.isInfinite(bounds.lower) || !Double.isInfinite(bounds.upper)) { taxonSets.add(bTaxa); distributions.add(distr); m_bounds.add(bounds); taxonSetIDs.add(prior.getID()); } } } } // assume all calibration constraints are MonoPhyletic // TODO: verify that this is a reasonable assumption lastMonophyletic = taxonSets.size(); // sort constraints such that if taxon set i is subset of taxon set j, then i < j for (int i = 0; i < lastMonophyletic; i++) { for (int j = i + 1; j < lastMonophyletic; j++) { Set<String> intersection = new HashSet<>(taxonSets.get(i)); intersection.retainAll(taxonSets.get(j)); if (intersection.size() > 0) { final boolean bIsSubset = taxonSets.get(i).containsAll(taxonSets.get(j)); final boolean bIsSubset2 = taxonSets.get(j).containsAll(taxonSets.get(i)); // sanity check: make sure either // o taxonset1 is subset of taxonset2 OR // o taxonset1 is superset of taxonset2 OR // o taxonset1 does not intersect taxonset2 if (!(bIsSubset || bIsSubset2)) { throw new Exception("333: Don't know how to generate a Random Tree for taxon sets that intersect, " + "but are not inclusive. 
Taxonset " + taxonSetIDs.get(i) + " and " + taxonSetIDs.get(j)); } // swap i & j if b1 subset of b2 if (bIsSubset) { swap(taxonSets, i, j); swap(distributions, i, j); swap(m_bounds, i, j); swap(taxonSetIDs, i, j); } } } } // build tree of mono constraints such that j is parent of i if i is a subset of j but i+1,i+2,...,j-1 are not. // The last one, standing for the virtual "root" of all monophyletic clades is not associated with an actual clade final int[] nParent = new int[lastMonophyletic]; children = new List[lastMonophyletic + 1]; for (int i = 0; i < lastMonophyletic + 1; i++) { children[i] = new ArrayList<Integer>(); } for (int i = 0; i < lastMonophyletic; i++) { int j = i + 1; while (j < lastMonophyletic && !taxonSets.get(j).containsAll(taxonSets.get(i))) { j++; } nParent[i] = j; children[j].add(i); } // make sure upper bounds of a child does not exceed the upper bound of its parent for (int i = lastMonophyletic-1; i >= 0 ;--i) { if (nParent[i] < lastMonophyletic ) { if (m_bounds.get(i).upper > m_bounds.get(nParent[i]).upper) { m_bounds.get(i).upper = m_bounds.get(nParent[i]).upper - 1e-100; } } } final PopulationFunction popFunction = populationFunctionInput.get(); simulateTree(sTaxa, popFunction); if (rootHeightInput.get() != null) { scaleToFit(rootHeightInput.get() / root.getHeight(), root); } nodeCount = 2 * sTaxa.size() - 1; internalNodeCount = sTaxa.size() - 1; leafNodeCount = sTaxa.size(); HashMap<String,Integer> taxonToNR = null; // preserve node numbers where possible if (m_initial.get() != null) { if( leafNodeCount == m_initial.get().getLeafNodeCount() ) { // dont ask me how the initial tree is rubbish (i.e. 0:0.0) taxonToNR = new HashMap<>(); for (Node n : m_initial.get().getExternalNodes()) { taxonToNR.put(n.getID(), n.getNr()); } } } else { taxonToNR = new HashMap<>(); String[] taxa = getTaxaNames(); for(int k = 0; k < taxa.length; ++k) { taxonToNR.put(taxa[k], k); } } // multiple simulation tries may produce an excess of nodes with invalid nr's. 
reset those. setNodesNrs(root, 0, new int[1], taxonToNR); initArrays(); if (m_initial.get() != null) { m_initial.get().assignFromWithoutID(this); } for(int k = 0; k < lastMonophyletic; ++k) { final MRCAPrior p = calibrations.get(k); if( p.isMonophyleticInput.get() ) { final TaxonSet taxonSet = p.taxonsetInput.get(); final Set<String> bTaxa = new HashSet<>(); bTaxa.addAll(taxonSet.asStringList()); int c = traverse(root, bTaxa, taxonSet.getTaxonCount(), new int[1]); boolean b = c == nrOfTaxa + 127; } } } private int setNodesNrs(final Node node, int internalNodeCount, int[] n, Map<String,Integer> initial) { if( node.isLeaf() ) { if( initial != null ) { node.setNr(initial.get(node.getID())); } else { node.setNr(n[0]); n[0] += 1; } } else { for (final Node child : node.getChildren()) { internalNodeCount = setNodesNrs(child, internalNodeCount, n, initial); } node.setNr(nrOfTaxa + internalNodeCount); internalNodeCount += 1; } return internalNodeCount; } private void scaleToFit(double scale, Node node) { if (!node.isLeaf()) { double oldHeight = node.getHeight(); node.height *= scale; final Integer iConstraint = getDistrConstraint(node); if (iConstraint != null) { if (node.height < m_bounds.get(iConstraint).lower || node.height > m_bounds.get(iConstraint).upper) { //revert scaling node.height = oldHeight; return; } } scaleToFit(scale, node.getLeft()); scaleToFit(scale, node.getRight()); if (node.height < Math.max(node.getLeft().getHeight(), node.getRight().getHeight())) { // this can happen if a child node is constrained and the default tree is higher than desired node.height = 1.0000001 * Math.max(node.getLeft().getHeight(), node.getRight().getHeight()); } } } //@Override public void getInitialisedStateNodes(final List<StateNode> stateNodes) { stateNodes.add(m_initial.get()); } /** * Simulates a coalescent tree, given a taxon list. 
* * @param taxa the set of taxa to simulate a coalescent tree between * @param demoFunction the demographic function to use */ public void simulateTree(final Set<String> taxa, final PopulationFunction demoFunction) { if (taxa.size() == 0) return; for (int attempts = 0; attempts < 1000; ++attempts) { try { nextNodeNr = nrOfTaxa; final Set<Node> candidates = new HashSet<>(); int i = 0; for (String taxon : taxa) { final Node node = new Node(); node.setNr(i); node.setID(taxon); node.setHeight(0.0); candidates.add(node); i += 1; } if (m_initial.get() != null) { processCandidateTraits(candidates, m_initial.get().m_traitList.get()); } else { processCandidateTraits(candidates, m_traitList.get()); } final Map<String,Node> allCandidates = new TreeMap<String,Node>(); for (Node node: candidates) { allCandidates.put(node.getID(),node); } root = simulateCoalescent(lastMonophyletic, allCandidates, candidates, demoFunction); return; } catch (ConstraintViolatedException e) { // need to generate another tree Log.warning.println("WARNING: Generating a random tree did not succeed. The most common reasons are:"); Log.warning.println("WARNING: 1. there are conflicting monophyletic constraints, for example if both (A,B) " + "and (B,C) must be monophyletic no tree will be able to meet these constraints at the same " + "time. To fix this, carefully check all clade sets, especially the ones that are expected to " + "be nested clades."); Log.warning.println("WARNING: 2. clade heights are constrained by an upper and lower bound, but the population size " + "is too large, so it is very unlikely a generated treed does not violate these constraints. To " + "fix this you can try to reduce the popultion size of the population model."); Log.warning.println("WARNING: Expect BEAST to crash if this is not fixed."); } } throw new RuntimeException("Failed to generate a random tree (probably a bug)."); } /** * Apply traits to a set of nodes. 
* @param candidates List of nodes
* @param traitSets List of TraitSets to apply
*/
    private void processCandidateTraits(Set<Node> candidates, List<TraitSet> traitSets) {
        // Attach every trait value to every candidate leaf as metadata, keyed by trait name.
        for (TraitSet traitSet : traitSets) {
            for (Node node : candidates) {
                node.setMetaData(traitSet.getTraitName(), traitSet.getValue(node.getID()));
            }
        }
    }

    /**
     * Simulates the coalescent for one monophyletic constraint: first recursively
     * builds the MRCA subtree of every nested monophyletic clade, then coalesces
     * those MRCAs together with the remaining (unconstrained) candidates, honouring
     * the upper bound registered for this constraint (if any).
     *
     * @param iIsMonophyleticNode index of the monophyletic constraint being processed
     * @param allCandidates map from taxon id to its leaf node
     * @param candidates the set of leaves covered by this constraint
     * @param demoFunction population-size function driving coalescent intervals
     * @return the MRCA node of all candidates under this constraint
     * @throws ConstraintViolatedException if a calibration bound cannot be satisfied
     */
    private Node simulateCoalescent(final int iIsMonophyleticNode, final Map<String,Node> allCandidates, final Set<Node> candidates, final PopulationFunction demoFunction) throws ConstraintViolatedException {
        final List<Node> remainingCandidates = new ArrayList<Node>();
        final Set<String> taxaDone = new TreeSet<>();
        for (final int iMonoNode : children[iIsMonophyleticNode]) {
            // create list of leaf nodes for this monophyletic MRCA
            final Set<Node> candidates2 = new HashSet<>();
            final Set<String> bTaxonSet = taxonSets.get(iMonoNode);
            for (String taxon : bTaxonSet) {
                candidates2.add(allCandidates.get(taxon));
            }
            // Recurse into the nested clade; its MRCA replaces all of its leaves below.
            final Node MRCA = simulateCoalescent(iMonoNode, allCandidates, candidates2, demoFunction);
            remainingCandidates.add(MRCA);
            taxaDone.addAll(bTaxonSet);
        }
        // Leaves not claimed by any nested clade coalesce directly at this level.
        for (final Node node : candidates) {
            if (!taxaDone.contains(node.getID())) {
                remainingCandidates.add(node);
            }
        }
        // Upper calibration bound for this clade, if one was registered; otherwise unbounded.
        final double upper = iIsMonophyleticNode < m_bounds.size() ? m_bounds.get(iIsMonophyleticNode).upper : Double.POSITIVE_INFINITY;
        final Node MRCA = simulateCoalescentWithMax(remainingCandidates, demoFunction, upper);
        return MRCA;
    }

    /**
     * @param id the id to match
     * @param nodes a list of nodes
     * @return the node with the matching id, or null if no node matches
     */
    private Node getNodeById(String id, List<Node> nodes) {
        for (Node node : nodes) {
            if (node.getID().equals(id)) return node;
        }
        return null;
    }

    /**
     * @param nodes
     * @param demographic
     * @return the root node of the given array of nodes after simulation of the
     *         coalescent under the given demographic model.
     * @throws beast.evolution.tree.RandomTree.ConstraintViolatedException
     */
    // public Node simulateCoalescent(final List<Node> nodes, final PopulationFunction demographic) throws ConstraintViolatedException {
    // return simulateCoalescentWithMax(nodes, demographic, Double.POSITIVE_INFINITY);

    /**
     * Simulates the coalescent of the given nodes but never above maxHeight.
     * Retries up to 1000 times; if the lineages still have not fully merged and
     * maxHeight is finite, the leftover lineages are merged deterministically by
     * stacking them at evenly spaced heights just below maxHeight.
     *
     * @param nodes the lineages to coalesce (must be non-empty)
     * @param demographic population-size function driving coalescent intervals
     * @param maxHeight hard ceiling on node heights
     * @return the root node of the given array of nodes after simulation of the
     *         coalescent under the given demographic model.
     * @throws beast.evolution.tree.RandomTree.ConstraintViolatedException
     */
    public Node simulateCoalescentWithMax(final List<Node> nodes, final PopulationFunction demographic, final double maxHeight) throws ConstraintViolatedException {
        // sanity check - disjoint trees
        // if( ! Tree.Utils.allDisjoint(nodes) ) {
        // throw new RuntimeException("non disjoint trees");
        if (nodes.size() == 0) {
            throw new IllegalArgumentException("empty nodes set");
        }
        for (int attempts = 0; attempts < 1000; ++attempts) {
            final List<Node> rootNode = simulateCoalescent(nodes, demographic, 0.0, maxHeight);
            if (rootNode.size() == 1) {
                // Fully coalesced into a single root: done.
                return rootNode.get(0);
            }
        }
        if( Double.isFinite(maxHeight) ){
            // Fallback: force-merge the remaining lineages in the strip (h, maxHeight).
            double h = -1;
            for( Node n : nodeList ) {
                h = Math.max(h, n.getHeight());
            }
            assert h < maxHeight;
            double dt = (maxHeight - h)/ (nodeList.size() + 1);
            while (nodeList.size() > 1) {
                int k = nodeList.size() - 1;
                final Node left = nodeList.remove(k);
                final Node right = nodeList.get(k-1);
                final Node newNode = new Node();
                newNode.setNr(nextNodeNr++);
                // multiple tries may generate an excess of nodes
                assert(nextNodeNr <= nrOfTaxa*2-1);
                newNode.setHeight(h + dt);
                newNode.setLeft(left);
                left.setParent(newNode);
                newNode.setRight(right);
                right.setParent(newNode);
                nodeList.set(k-1, newNode);
            }
            assert (nodeList.size() == 1);
            return nodeList.get(0);
        }
        throw new RuntimeException("failed to merge trees after 1000 tries!");
    }

    /**
     * One pass of the coalescent simulation: activates lineages as currentHeight
     * passes their heights and merges random pairs at simulated coalescent times,
     * stopping once the next event would exceed maxHeight.
     *
     * @param nodes the lineages to coalesce
     * @param demographic population-size function driving coalescent intervals
     * @param currentHeight height at which simulation starts
     * @param maxHeight events above this height are not performed
     * @return the (possibly still multi-lineage) node list after simulation
     * @throws ConstraintViolatedException if a calibration bound cannot be satisfied
     */
    public List<Node> simulateCoalescent(final List<Node> nodes, final PopulationFunction demographic, double currentHeight, final double maxHeight) throws ConstraintViolatedException {
        // If only one node, return it
        // continuing results in an infinite loop
        if (nodes.size() == 1) return nodes;
        final double[] heights = new double[nodes.size()];
        for (int i = 0; i < nodes.size(); i++) {
            heights[i] = nodes.get(i).getHeight();
        }
        final int[] indices = new int[nodes.size()];
        HeapSort.sort(heights, indices);
        // node list, rebuilt in ascending height order so the "active" prefix is well-defined
        nodeList.clear();
        activeNodeCount = 0;
        for (int i = 0; i < nodes.size(); i++) {
            nodeList.add(nodes.get(indices[i]));
        }
        setCurrentHeight(currentHeight);
        // get at least two tips
        while (getActiveNodeCount() < 2) {
            currentHeight = getMinimumInactiveHeight();
            setCurrentHeight(currentHeight);
        }
        // simulate coalescent events
        double nextCoalescentHeight = currentHeight + PopulationFunction.Utils.getSimulatedInterval(demographic, getActiveNodeCount(), currentHeight);
        // while (nextCoalescentHeight < maxHeight && (getNodeCount() > 1)) {
        while (nextCoalescentHeight < maxHeight && (nodeList.size() > 1)) {
            if (nextCoalescentHeight >= getMinimumInactiveHeight()) {
                // An inactive lineage becomes active before the proposed coalescence:
                // advance to its height instead of coalescing.
                currentHeight = getMinimumInactiveHeight();
                setCurrentHeight(currentHeight);
            } else {
                currentHeight = coalesceTwoActiveNodes(currentHeight, nextCoalescentHeight);
            }
            // if (getNodeCount() > 1) {
            if (nodeList.size() > 1) {
                // get at least two tips
                while (getActiveNodeCount() < 2) {
                    currentHeight = getMinimumInactiveHeight();
                    setCurrentHeight(currentHeight);
                }
                // nextCoalescentHeight = currentHeight +
                // DemographicFunction.Utils.getMedianInterval(demographic,
                // getActiveNodeCount(), currentHeight);
                nextCoalescentHeight = currentHeight + PopulationFunction.Utils.getSimulatedInterval(demographic, getActiveNodeCount(), currentHeight);
            }
        }
        return nodeList;
    }

    /**
     * @return the height of youngest inactive node.
     */
    private double getMinimumInactiveHeight() {
        if (activeNodeCount < nodeList.size()) {
            return (nodeList.get(activeNodeCount)).getHeight();
        } else return Double.POSITIVE_INFINITY;
    }

    /**
     * Set the current height, activating every lineage at or below it.
     * @param height
     */
    private void setCurrentHeight(final double height) {
        while (getMinimumInactiveHeight() <= height) {
            activeNodeCount += 1;
        }
    }

    /**
     * @return the number of active nodes (equate to lineages)
     */
    private int getActiveNodeCount() {
        return activeNodeCount;
    }

    // /**
    // * @return the total number of nodes both active and inactive
    // */
    // private int getNodeCount() {
    // return nodeList.size();

    /**
     * Coalesce two nodes in the active list. This method removes the two
     * (randomly selected) active nodes and replaces them with the new node at
     * the top of the active list.
     * If the new node carries a calibration constraint, its height is pulled
     * into the constraint's [lower, upper] interval (or the constraint fails).
     * @param fMinHeight lower bound on the merged node's height
     * @param height proposed coalescence height
     * @return the height actually assigned to the merged node
     * @throws ConstraintViolatedException if the calibration interval is empty
     */
    private double coalesceTwoActiveNodes(final double fMinHeight, double height) throws ConstraintViolatedException {
        final int node1 = Randomizer.nextInt(activeNodeCount);
        int node2 = node1;
        while (node2 == node1) {
            node2 = Randomizer.nextInt(activeNodeCount);
        }
        final Node left = nodeList.get(node1);
        final Node right = nodeList.get(node2);
        final Node newNode = new Node();
        // System.err.println(2 * m_taxa.get().getNrTaxa() - nodeList.size());
        newNode.setNr(nextNodeNr++);
        // multiple tries may generate an excess of nodes
        assert(nextNodeNr <= nrOfTaxa*2-1);
        newNode.setHeight(height);
        newNode.setLeft(left);
        left.setParent(newNode);
        newNode.setRight(right);
        right.setParent(newNode);
        nodeList.remove(left);
        nodeList.remove(right);
        activeNodeCount -= 2;
        nodeList.add(activeNodeCount, newNode);
        activeNodeCount += 1;
        // check if there is a calibration on this node
        final Integer iConstraint = getDistrConstraint(newNode);
        if (iConstraint != null) {
            // for (int i = 0; i < 1000; i++) {
            // try {
            // height = distr.sample(1)[0][0];
            // } catch (Exception e) {
            // e.printStackTrace();
            // if (height > fMinHeight) {
            // break;
            final double fMin = Math.max(m_bounds.get(iConstraint).lower, fMinHeight);
            final double fMax = m_bounds.get(iConstraint).upper;
            if (fMax < fMin) {
                // failed to draw a matching height from the MRCA distribution
                // TODO: try to scale rest of tree down
                throw new ConstraintViolatedException();
            }
            if (height < fMin || height > fMax) {
                // Clamp/redraw the height into the calibration interval.
                if (fMax == Double.POSITIVE_INFINITY) {
                    height = fMin + 0.1;
                } else {
                    height = fMin + Randomizer.nextDouble() * (fMax - fMin);
                }
                newNode.setHeight(height);
            }
        }
        if (getMinimumInactiveHeight() < height) {
            throw new RuntimeException(
                    "This should never happen! Somehow the current active node is older than the next inactive node!");
        }
        return height;
    }

    /**
     * Looks up the calibration constraint (if any) whose taxon set is exactly
     * the clade below the given node.
     *
     * @param node the node to check
     * @return the constraint index, or null if none matches
     */
    private Integer getDistrConstraint(final Node node) {
        for (int i = 0; i < distributions.size(); i++) {
            if (distributions.get(i) != null) {
                final Set<String> taxonSet = taxonSets.get(i);
                // traverse() returns the magic value nrOfTaxa + 127 exactly when the
                // node is the MRCA of taxonSet.
                if (traverse(node, taxonSet, taxonSet.size(), new int[1]) == nrOfTaxa + 127) {
                    return i;
                }
            }
        }
        return null;
    }

    /**
     * Counts how many leaves of MRCATaxonSet occur under node; returns the
     * sentinel value nrOfTaxa + 127 exactly once, at the MRCA of the set
     * (bumping it by one further up so it is propagated but never re-triggered).
     *
     * @param node subtree root being inspected
     * @param MRCATaxonSet the taxa whose MRCA is sought
     * @param nNrOfMRCATaxa size of MRCATaxonSet
     * @param nTaxonCount out-parameter: number of leaves under node
     * @return matched-leaf count, or nrOfTaxa + 127 at the MRCA
     */
    int traverse(final Node node, final Set<String> MRCATaxonSet, final int nNrOfMRCATaxa, final int[] nTaxonCount) {
        if (node.isLeaf()) {
            nTaxonCount[0]++;
            if (MRCATaxonSet.contains(node.getID())) {
                return 1;
            } else {
                return 0;
            }
        } else {
            int iTaxons = traverse(node.getLeft(), MRCATaxonSet, nNrOfMRCATaxa, nTaxonCount);
            final int nLeftTaxa = nTaxonCount[0];
            nTaxonCount[0] = 0;
            if (node.getRight() != null) {
                iTaxons += traverse(node.getRight(), MRCATaxonSet, nNrOfMRCATaxa, nTaxonCount);
                final int nRightTaxa = nTaxonCount[0];
                nTaxonCount[0] = nLeftTaxa + nRightTaxa;
            }
            if (iTaxons == nrOfTaxa + 127) {
                // Sentinel already produced deeper in the tree: shift it so the
                // MRCA check below cannot fire a second time.
                iTaxons++;
            }
            if (iTaxons == nNrOfMRCATaxa) {
                // we are at the MRCA, return magic nr
                return nrOfTaxa + 127;
            }
            return iTaxons;
        }
    }

    @Override
    public String[] getTaxaNames() {
        // Lazily cache the taxa names from either the alignment or the taxon set input.
        if (m_sTaxaNames == null) {
            final List<String> sTaxa;
            if (taxaInput.get() != null) {
                sTaxa = taxaInput.get().getTaxaNames();
            } else {
                sTaxa = m_taxonset.get().asStringList();
            }
            m_sTaxaNames = sTaxa.toArray(new String[sTaxa.size()]);
        }
        return m_sTaxaNames;
    }

    // Working state for the coalescent simulation: nodeList is kept sorted by
    // height; the first activeNodeCount entries are the currently active lineages.
    final private ArrayList<Node> nodeList = new ArrayList<Node>();
    private int activeNodeCount = 0;
}
package de.dakror.spamwars.game.projectile;

import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.Polygon;
import java.awt.Rectangle;
import java.awt.geom.AffineTransform;
import java.awt.geom.Line2D;

import org.json.JSONException;
import org.json.JSONObject;

import de.dakror.gamesetup.util.Drawable;
import de.dakror.gamesetup.util.Helper;
import de.dakror.gamesetup.util.Vector;
import de.dakror.spamwars.game.Game;
import de.dakror.spamwars.game.entity.Entity;
import de.dakror.spamwars.game.entity.Player;
import de.dakror.spamwars.game.world.Tile;
import de.dakror.spamwars.util.Assistant;

/**
 * A fired projectile that travels in a straight line from its spawn point
 * towards a target point clamped to the weapon's range, colliding with tiles
 * and players along the way.
 *
 * @author Dakror
 */
public class Projectile implements Drawable
{
	// Current position and the (range-clamped) end point of the flight path.
	private Vector pos, target;
	ProjectileType type;
	// Set once the projectile has hit something or reached its target.
	private boolean dead;
	// Rotation (radians) used for drawing, derived from the flight direction.
	private float rot;
	
	/**
	 * Creates a projectile. The supplied target is only used for direction: the
	 * effective target is recomputed at exactly {@code type.getRange()} away
	 * from {@code pos} along that direction.
	 *
	 * @param pos spawn position
	 * @param target aim point (direction only)
	 * @param type projectile type (speed, range, damage, texture)
	 */
	public Projectile(Vector pos, Vector target, ProjectileType type)
	{
		this.type = type;
		this.pos = pos;
		this.target = target;
		
		Vector dif = target.clone().sub(pos);
		dif.setLength(type.getRange());
		
		this.target = pos.clone().add(dif);
	}
	
	/**
	 * Deserializes a projectile from JSON (counterpart of {@link #serialize()}).
	 *
	 * @param o JSON object with keys x, y, tx, ty, t
	 * @throws JSONException if a key is missing or of the wrong type
	 */
	public Projectile(JSONObject o) throws JSONException
	{
		this(new Vector((float) o.getDouble("x"), (float) o.getDouble("y")), new Vector((float) o.getDouble("tx"), (float) o.getDouble("ty")), ProjectileType.values()[o.getInt("t")]);
	}
	
	/**
	 * Serializes position, target, type ordinal and dead flag to JSON.
	 * A JSON error is printed and an incomplete object returned.
	 *
	 * @return the JSON representation
	 */
	public JSONObject serialize()
	{
		JSONObject o = new JSONObject();
		try
		{
			o.put("x", pos.x);
			o.put("y", pos.y);
			o.put("tx", target.x);
			o.put("ty", target.y);
			o.put("t", type.ordinal());
			o.put("d", dead);
		}
		catch (JSONException e)
		{
			e.printStackTrace();
		}
		
		return o;
	}
	
	/**
	 * Draws the projectile texture at its world position, rotated around the
	 * texture center by the current flight angle. No-op once dead.
	 */
	@Override
	public void draw(Graphics2D g)
	{
		if (dead) return;
		
		// Shift from world coordinates into screen coordinates.
		Vector pos = this.pos.clone().add(new Vector(Game.world.x, Game.world.y));
		
		AffineTransform old = g.getTransform();
		AffineTransform at = g.getTransform();
		at.rotate(rot, pos.x + type.getTex().width / 2, pos.y + type.getTex().height / 2);
		g.setTransform(at);
		Helper.drawImage(Game.getImage("weapon/projectiles.png"), (int) pos.x, (int) pos.y, type.getTex().width, type.getTex().height, type.getTex().x, type.getTex().y, type.getTex().width, type.getTex().height, g);
		g.setTransform(old);
	}
	
	/**
	 * Advances the projectile by one tick: moves at most {@code type.getSpeed()}
	 * towards the target and performs swept collision (via the segment from the
	 * old to the new position) against solid tiles, slope tiles and players.
	 * No-op once dead.
	 */
	@Override
	public void update(int tick)
	{
		if (dead) return;
		
		Vector dif = target.clone().sub(pos);
		if (dif.getLength() > type.getSpeed()) dif.setLength(type.getSpeed());
		
		rot = (float) Math.toRadians(dif.getAngleOnXAxis());
		
		Vector nextPos = pos.clone().add(dif);
		
		Point p = Game.world.getTile((int) nextPos.x, (int) nextPos.y);
		Tile tile = Tile.values()[Game.world.getTileIdAtPixel((int) nextPos.x, (int) nextPos.y)];
		
		// Movement segment used for swept collision so fast projectiles cannot
		// tunnel through thin obstacles between two ticks.
		Line2D line = new Line2D.Float(pos.x, pos.y, nextPos.x, nextPos.y);
		
		if (tile.getBump() != null)
		{
			// Rectangular (solid) tile: collide against its bump box.
			Rectangle b = (Rectangle) tile.getBump().clone();
			b.translate(p.x * Tile.SIZE, p.y * Tile.SIZE);
			if (b.intersectsLine(line))
			{
				dead = true;
				return;
			}
		}
		else if (tile.getLeftY() >= 0)
		{
			// Slope tile: build its quad from the left/right surface heights.
			Polygon b = new Polygon();
			b.addPoint(0, tile.getLeftY());
			b.addPoint(Tile.SIZE, tile.getRightY());
			b.addPoint(Tile.SIZE, Tile.SIZE);
			b.addPoint(0, Tile.SIZE);
			b.translate(p.x * Tile.SIZE, p.y * Tile.SIZE);
			
			if (Assistant.intersection(b, line))
			{
				dead = true;
				return;
			}
		}
		
		for (Entity e : Game.world.entities)
		{
			if (e instanceof Player && e.getBump(0, 0).intersectsLine(line))
			{
				// Damage is only applied when the hit player is the local user:
				// each client applies its own damage.
				if (((Player) e).getUser().getUsername().equals(Game.user.getUsername())) Game.player.dealDamage(type.getDamage());
				
				dead = true;
				return;
			}
		}
		
		pos.add(dif);
		
		// Expire once the maximum range (the clamped target) is reached.
		if (pos.equals(target)) dead = true;
	}
	
	public ProjectileType getType()
	{
		return type;
	}
	
	public boolean isDead()
	{
		return dead;
	}
}
package de.lmu.ifi.dbs.elki.math.linearalgebra;

import java.util.Arrays;

import de.lmu.ifi.dbs.elki.data.NumberVector;
import de.lmu.ifi.dbs.elki.utilities.FormatUtil;

/**
 * Provides a vector object that encapsulates an m x 1 - matrix object.
 *
 * @author Elke Achtert
 *
 * @apiviz.landmark
 */
public class Vector implements NumberVector<Double> {
  /**
   * Array for internal storage of elements.
   *
   * @serial internal array storage.
   */
  protected final double[] elements;

  /**
   * Error message (in assertions!) when vector dimensionalities do not agree.
   */
  public static final String ERR_VEC_DIMENSIONS = "Vector dimensions do not agree.";

  /**
   * Error message (in assertions!) when matrix dimensionalities do not agree.
   */
  public static final String ERR_MATRIX_INNERDIM = "Matrix inner dimensions do not agree.";

  /**
   * Error message (in assertions!) when dimensionalities do not agree.
   */
  private static final String ERR_DIMENSIONS = "Dimensionalities do not agree.";

  /**
   * Construct a vector from a given array.
   *
   * @param values array of doubles
   */
  public Vector(final double... values) {
    elements = values;
  }

  /**
   * Provides an m x 1 vector.
   *
   * @param m the number of rows
   */
  public Vector(final int m) {
    elements = new double[m];
  }

  /**
   * Returns a randomly created vector of length 1.0.
   *
   * @param dimensionality dimensionality
   * @return the dimensionality of the vector
   */
  public static final Vector randomNormalizedVector(final int dimensionality) {
    final Vector v = new Vector(dimensionality);
    double norm = 0;
    // Redraw until a non-degenerate vector is produced, then normalize.
    while (norm <= 0) {
      for (int i = 0; i < dimensionality; i++) {
        v.elements[i] = Math.random();
      }
      norm = v.euclideanLength();
    }
    for (int row = 0; row < dimensionality; row++) {
      v.elements[row] /= norm;
    }
    return v;
  }

  /**
   * Returns the ith unit vector of the specified dimensionality.
   *
   * @param dimensionality the dimensionality of the vector
   * @param i the index
   * @return the ith unit vector of the specified dimensionality
   */
  public static final Vector unitVector(final int dimensionality, final int i) {
    final Vector v = new Vector(dimensionality);
    v.elements[i] = 1;
    return v;
  }

  /**
   * Returns a copy of this vector.
   *
   * @return a copy of this vector
   */
  public final Vector copy() {
    return new Vector(elements.clone());
  }

  @Override
  public Vector clone() {
    return this.copy();
  }

  /**
   * Access the internal two-dimensional array.
   *
   * @return Pointer to the two-dimensional array of matrix elements.
   */
  public final double[] getArrayRef() {
    return elements;
  }

  /**
   * Copy the internal two-dimensional array.
   *
   * @return Two-dimensional array copy of matrix elements.
   */
  public final double[] getArrayCopy() {
    return elements.clone();
  }

  /**
   * Returns the dimensionality of this vector.
   *
   * @return the dimensionality of this vector
   */
  @Override
  public final int getDimensionality() {
    return elements.length;
  }

  /**
   * Returns the value at the specified row.
   *
   * @param i the row index
   * @return the value at row i
   */
  public final double get(final int i) {
    return elements[i];
  }

  /**
   * Sets the value at the specified row.
   *
   * @param i the row index
   * @param value the value to be set
   *
   * @return the modified vector
   */
  public final Vector set(final int i, final double value) {
    elements[i] = value;
    return this;
  }

  /**
   * Returns a new vector which is the result of this vector plus the specified
   * vector.
   *
   * @param v the vector to be added
   * @return the resulting vector
   */
  public final Vector plus(final Vector v) {
    assert (this.elements.length == v.elements.length) : ERR_VEC_DIMENSIONS;
    final Vector result = new Vector(elements.length);
    for (int i = 0; i < elements.length; i++) {
      result.elements[i] = elements[i] + v.elements[i];
    }
    return result;
  }

  /**
   * Returns a new vector which is the result of this vector plus the specified
   * vector times the given factor.
   *
   * @param v the vector to be added
   * @param s the scalar
   * @return the resulting vector
   */
  public final Vector plusTimes(final Vector v, final double s) {
    assert (this.elements.length == v.elements.length) : ERR_VEC_DIMENSIONS;
    final Vector result = new Vector(elements.length);
    for (int i = 0; i < elements.length; i++) {
      result.elements[i] = elements[i] + v.elements[i] * s;
    }
    return result;
  }

  /**
   * a = a + b.
   *
   * @param b another vector
   * @return a + b in this vector
   */
  public final Vector plusEquals(final Vector b) {
    assert (this.elements.length == b.elements.length) : ERR_VEC_DIMENSIONS;
    for (int i = 0; i < elements.length; i++) {
      elements[i] += b.elements[i];
    }
    return this;
  }

  /**
   * a = a + s * b.
   *
   * @param b another vector
   * @param s Scalar
   * @return a + s * b in this vector
   */
  public final Vector plusTimesEquals(final Vector b, final double s) {
    assert (this.elements.length == b.elements.length) : ERR_VEC_DIMENSIONS;
    for (int i = 0; i < elements.length; i++) {
      elements[i] += s * b.elements[i];
    }
    return this;
  }

  /**
   * Add a constant value to all dimensions.
   *
   * @param d Value to add
   * @return Modified vector
   */
  public final Vector plusEquals(final double d) {
    for (int i = 0; i < elements.length; i++) {
      elements[i] += d;
    }
    return this;
  }

  /**
   * Returns this vector minus the specified vector v.
   *
   * @param v the vector to be subtracted from this vector
   * @return this vector minus the specified vector v
   */
  public final Vector minus(final Vector v) {
    // FIX: dimensionality assert added for consistency with plus()/minusEquals().
    assert (this.elements.length == v.elements.length) : ERR_VEC_DIMENSIONS;
    final Vector sub = new Vector(elements.length);
    for (int i = 0; i < elements.length; i++) {
      sub.elements[i] = elements[i] - v.elements[i];
    }
    return sub;
  }

  /**
   * Returns this vector minus the specified vector v times s.
   *
   * @param v the vector to be subtracted from this vector
   * @param s the scaling factor
   * @return this vector minus the specified vector v
   */
  public final Vector minusTimes(final Vector v, final double s) {
    // FIX: dimensionality assert added for consistency with plusTimes()/minusTimesEquals().
    assert (this.elements.length == v.elements.length) : ERR_VEC_DIMENSIONS;
    final Vector sub = new Vector(elements.length);
    for (int i = 0; i < elements.length; i++) {
      sub.elements[i] = elements[i] - v.elements[i] * s;
    }
    return sub;
  }

  /**
   * a = a - b.
   *
   * @param b another vector
   * @return a - b in this vector
   */
  public final Vector minusEquals(final Vector b) {
    assert (this.elements.length == b.elements.length) : ERR_VEC_DIMENSIONS;
    for (int i = 0; i < elements.length; i++) {
      elements[i] -= b.elements[i];
    }
    return this;
  }

  /**
   * a = a - s * b.
   *
   * @param b another vector
   * @param s Scalar
   * @return a - s * b in this vector
   */
  public final Vector minusTimesEquals(final Vector b, final double s) {
    assert (this.elements.length == b.elements.length) : ERR_VEC_DIMENSIONS;
    for (int i = 0; i < elements.length; i++) {
      elements[i] -= s * b.elements[i];
    }
    return this;
  }

  /**
   * Subtract a constant value from all dimensions.
   *
   * @param d Value to subtract
   * @return Modified vector
   */
  public final Vector minusEquals(final double d) {
    for (int i = 0; i < elements.length; i++) {
      elements[i] -= d;
    }
    return this;
  }

  /**
   * Returns a new vector which is the result of this vector multiplied by the
   * specified scalar.
   *
   * @param s the scalar to be multiplied
   * @return the resulting vector
   */
  public final Vector times(final double s) {
    final Vector v = new Vector(elements.length);
    for (int i = 0; i < elements.length; i++) {
      v.elements[i] = elements[i] * s;
    }
    return v;
  }

  /**
   * Multiply a matrix by a scalar in place, A = s*A.
   *
   * @param s scalar
   * @return replace A by s*A
   */
  public final Vector timesEquals(final double s) {
    for (int i = 0; i < elements.length; i++) {
      elements[i] *= s;
    }
    return this;
  }

  /**
   * Linear algebraic matrix multiplication, A * B.
   *
   * @param B another matrix
   * @return Matrix product, A * B
   */
  public final Matrix times(final Matrix B) {
    assert (B.elements.length == 1) : ERR_MATRIX_INNERDIM;
    final Matrix X = new Matrix(this.elements.length, B.columndimension);
    for (int j = 0; j < B.columndimension; j++) {
      for (int i = 0; i < this.elements.length; i++) {
        X.elements[i][j] = elements[i] * B.elements[0][j];
      }
    }
    return X;
  }

  /**
   * Linear algebraic matrix multiplication, A<sup>T</sup> * B.
   *
   * @param B another matrix
   * @return Matrix product, A<sup>T</sup> * B
   */
  public final Matrix transposeTimes(final Matrix B) {
    assert (B.elements.length == this.elements.length) : ERR_MATRIX_INNERDIM;
    final Matrix X = new Matrix(1, B.columndimension);
    for (int j = 0; j < B.columndimension; j++) {
      // multiply it with each row from A
      double s = 0;
      for (int k = 0; k < this.elements.length; k++) {
        s += this.elements[k] * B.elements[k][j];
      }
      X.elements[0][j] = s;
    }
    return X;
  }

  /**
   * Linear algebraic matrix multiplication, a<sup>T</sup> * B * c.
   *
   * @param B matrix
   * @param c vector on the right
   * @return Matrix product, a<sup>T</sup> * B * c
   */
  public final double transposeTimesTimes(final Matrix B, final Vector c) {
    assert (B.elements.length == this.elements.length) : ERR_MATRIX_INNERDIM;
    double sum = 0.0;
    for (int j = 0; j < B.columndimension; j++) {
      // multiply it with each row from A
      double s = 0;
      for (int k = 0; k < this.elements.length; k++) {
        s += this.elements[k] * B.elements[k][j];
      }
      sum += s * c.elements[j];
    }
    return sum;
  }

  /**
   * Linear algebraic matrix multiplication, A<sup>T</sup> * B.
   *
   * @param B another vector
   * @return Matrix product, A<sup>T</sup> * B
   */
  public final double transposeTimes(final Vector B) {
    assert (B.elements.length == this.elements.length) : ERR_MATRIX_INNERDIM;
    double s = 0;
    for (int k = 0; k < this.elements.length; k++) {
      s += this.elements[k] * B.elements[k];
    }
    return s;
  }

  /**
   * Linear algebraic matrix multiplication, A * B^T.
   *
   * @param B another matrix
   * @return Matrix product, A * B^T
   */
  public final Matrix timesTranspose(final Matrix B) {
    assert (B.columndimension == 1) : ERR_MATRIX_INNERDIM;
    final Matrix X = new Matrix(this.elements.length, B.elements.length);
    for (int j = 0; j < B.elements.length; j++) {
      for (int i = 0; i < this.elements.length; i++) {
        X.elements[i][j] = elements[i] * B.elements[j][0];
      }
    }
    return X;
  }

  /**
   * Linear algebraic matrix multiplication, A * B^T.
   *
   * @param B another matrix
   * @return Matrix product, A * B^T
   */
  public final Matrix timesTranspose(final Vector B) {
    final Matrix X = new Matrix(this.elements.length, B.elements.length);
    for (int j = 0; j < B.elements.length; j++) {
      for (int i = 0; i < this.elements.length; i++) {
        X.elements[i][j] = elements[i] * B.elements[j];
      }
    }
    return X;
  }

  /**
   * Returns the length of this vector.
   *
   * @return the length of this vector
   */
  public final double euclideanLength() {
    double acc = 0.0;
    for (int row = 0; row < elements.length; row++) {
      final double v = elements[row];
      acc += v * v;
    }
    return Math.sqrt(acc);
  }

  /**
   * Normalizes this vector to the length of 1.0.
   * A zero vector is left unchanged.
   *
   * @return this vector
   */
  public final Vector normalize() {
    double norm = euclideanLength();
    if (norm != 0) {
      for (int row = 0; row < elements.length; row++) {
        elements[row] /= norm;
      }
    }
    return this;
  }

  /**
   * Projects this row vector into the subspace formed by the specified matrix
   * v.
   *
   * @param v the subspace matrix
   * @return the projection of p into the subspace formed by v
   */
  public final Vector projection(final Matrix v) {
    assert (elements.length == v.elements.length) : ERR_DIMENSIONS;
    Vector sum = new Vector(elements.length);
    for (int i = 0; i < v.columndimension; i++) {
      // TODO: optimize - copy less?
      Vector v_i = v.getCol(i);
      sum.plusTimesEquals(v_i, this.transposeTimes(v_i));
    }
    return sum;
  }

  @Override
  public int hashCode() {
    return Arrays.hashCode(this.elements);
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    final Vector other = (Vector) obj;
    if (this.elements.length != other.elements.length) {
      return false;
    }
    return Arrays.equals(this.elements, other.elements);
  }

  /**
   * Returns a string representation of this vector.
   *
   * @return a string representation of this vector.
   */
  @Override
  public final String toString() {
    return FormatUtil.format(this);
  }

  /**
   * Returns a string representation of this vector without adding extra
   * whitespace.
   *
   * @return a string representation of this vector.
   */
  public final String toStringNoWhitespace() {
    return "[" + FormatUtil.format(elements, ",") + "]";
  }

  /**
   * Reset the Vector to 0.
   */
  public void setZero() {
    Arrays.fill(elements, 0.0);
  }

  /**
   * Rotate vector by 90 degrees.
   *
   * @return self, for operation chaining.
   */
  public Vector rotate90Equals() {
    assert (elements.length == 2);
    double temp = elements[0];
    elements[0] = elements[1];
    elements[1] = -temp;
    return this;
  }

  /**
   * Cross product for 3d vectors, i.e. <code>this x other</code>
   *
   * @param other Other vector
   * @return Cross product of this vector and the other vector
   */
  public Vector cross3D(Vector other) {
    assert (elements.length == 3 && other.elements.length == 3);
    Vector out = new Vector(3);
    out.elements[0] = (elements[1] * other.elements[2]) - (elements[2] * other.elements[1]);
    out.elements[1] = (elements[2] * other.elements[0]) - (elements[0] * other.elements[2]);
    out.elements[2] = (elements[0] * other.elements[1]) - (elements[1] * other.elements[0]);
    return out;
  }

  // ////// NumberVector API. A bit hackish. :-(

  @Override
  public double getMin(int dimension) {
    return elements[dimension];
  }

  @Override
  public double getMax(int dimension) {
    return elements[dimension];
  }

  @Override
  @Deprecated
  public Double getValue(int dimension) {
    return Double.valueOf(elements[dimension]);
  }

  @Override
  public double doubleValue(int dimension) {
    return elements[dimension];
  }

  @Override
  public float floatValue(int dimension) {
    return (float) elements[dimension];
  }

  @Override
  public int intValue(int dimension) {
    return (int) elements[dimension];
  }

  @Override
  public long longValue(int dimension) {
    return (long) elements[dimension];
  }

  @Override
  public short shortValue(int dimension) {
    return (short) elements[dimension];
  }

  @Override
  public byte byteValue(int dimension) {
    return (byte) elements[dimension];
  }

  @Override
  public Vector getColumnVector() {
    return copy();
  }
}
import java.util.*;

/**
 * A simple directed graph over integer nodes, stored as an adjacency map.
 * Invariants enforced by {@link #addArc(int, int)}: no self-loops and no
 * reverse of an already-present arc (the graph stays antisymmetric).
 */
public class Graph {
    /** Adjacency map: node -> sorted set of direct successors. */
    private Map<Integer, Set<Integer>> arcs = new HashMap<>();
    private Random rand = new Random();

    /** Creates an empty graph. */
    public Graph() {}

    /**
     * Creates a random graph on the nodes 1..n, attempting a random number of
     * random arcs per node (self-loops and reverse arcs are silently dropped).
     *
     * @param n number of nodes
     */
    public Graph(int n) {
        for (int i = 0; i < n; i++) {
            for (int j = 0; j < rand.nextInt(n); j++) {
                addArc(i + 1, rand.nextInt(n) + 1);
            }
        }
    }

    /**
     * Adds the directed arc startIndex -> endIndex. Silently ignored when it
     * would create a self-loop or the reverse of an existing arc.
     *
     * @param startIndex arc origin
     * @param endIndex arc destination
     */
    public void addArc(int startIndex, int endIndex) {
        // No arcs to itself
        if (startIndex == endIndex) {
            return;
        }
        // Must be a directed graph: refuse the reverse of an existing arc
        Set<Integer> reverse = arcs.get(endIndex);
        if (reverse != null && reverse.contains(startIndex)) {
            return;
        }
        // computeIfAbsent replaces the get/null-check/put dance
        arcs.computeIfAbsent(startIndex, k -> new TreeSet<>()).add(endIndex);
    }

    /** Prints each node followed by its successors, one node per line. */
    public void print() {
        for (Map.Entry<Integer, Set<Integer>> entry : arcs.entrySet()) {
            System.out.print(entry.getKey() + " -> ");
            for (Integer value : entry.getValue()) {
                System.out.print(value + " ");
            }
            System.out.println();
        }
    }

    /**
     * @param startNode node to start walking from
     * @return true iff an infinite path (i.e. a reachable cycle) exists
     *         starting at startNode
     */
    public boolean infPath(int startNode) {
        return findEndNodesRecursive(startNode, new HashSet<>(), new HashSet<>());
    }

    /**
     * Depth-first search for a cycle reachable from startNode.
     *
     * @param startNode current node
     * @param path nodes on the current DFS path (backtracked on return,
     *        replacing the original O(n^2) per-call list copying)
     * @param deadEnds memo of nodes proven to reach no cycle; sound because a
     *        node that cannot reach any cycle stays cycle-free on every path
     * @return true iff a cycle is reachable from startNode
     */
    private boolean findEndNodesRecursive(int startNode, Set<Integer> path, Set<Integer> deadEnds) {
        if (path.contains(startNode)) {
            // Revisited a node on the current path: cycle found.
            return true;
        }
        Set<Integer> successors = arcs.get(startNode);
        if (successors == null) {
            // No outgoing arcs: every path through here terminates.
            return false;
        }
        path.add(startNode);
        try {
            for (Integer endNode : successors) {
                if (deadEnds.contains(endNode)) {
                    // BUG FIX: the original returned false here, abandoning the
                    // remaining successors; a known dead end must only be skipped.
                    continue;
                }
                if (findEndNodesRecursive(endNode, path, deadEnds)) {
                    return true;
                }
                deadEnds.add(endNode);
            }
        } finally {
            // Backtrack so sibling branches see a clean path.
            path.remove(startNode);
        }
        return false;
    }
}
public class Hello {
    /** Program entry point: writes a single-space line to standard output. */
    public static void main(String[] args) {
        final String message = " ";
        System.out.println(message);
    }
}
package com.inepex.ineom.shared;

import java.util.Map;

/**
 * Base class for reading and writing typed properties on {@link HasProp}
 * objects. Properties are organized into named groups, each backed by a JSON
 * blob; concrete subclasses supply the (de)serialization.
 */
public abstract class PropHandler {

	public abstract void setProp(HasProp hasProp, String group, String key, Boolean value);

	public abstract void setProp(HasProp hasProp, String group, String key, Double value);

	public abstract void setProp(HasProp hasProp, String group, String key, String value, boolean strictMatch);

	public abstract void setProp(HasProp hasProp, String group, String key, String value);

	public abstract Boolean getBooleanProp(HasProp hasProp, String group, String key);

	public abstract Double getNumberProp(HasProp hasProp, String group, String key);

	public abstract String getStringProp(HasProp hasProp, String group, String key);

	public abstract String getStringPropFromGroupJson(String key, String json);

	public abstract Boolean getBooleanPropFromGroupJson(String key, String json);

	public abstract Double getNumberPropFromGroupJson(String key, String json);

	public abstract Map<String, Object> getPropMap(HasProp hasProp, String id);

	public abstract void setProp(HasProp o, String group, String key, Object value);

	/**
	 * @param key a property key
	 * @return the key marked for strict matching ("#"-prefixed)
	 */
	public static String getStrictMatchKey(String key) {
		return "#" + key;
	}

	/**
	 * Copies every property group of {@code from} into {@code to}: groups
	 * missing on the target are copied wholesale as JSON, existing groups are
	 * merged key by key (source values win).
	 *
	 * @param from source of the properties
	 * @param to target that receives them
	 */
	public void mergeProps(HasProp from, HasProp to) {
		for (String id : from.getAllPropsJson().keySet()) {
			String toPropsJson = to.getPropsJson(id);
			if (toPropsJson == null) {
				// Target has no such group: copy the whole JSON blob.
				to.getAllPropsJson().put(id, from.getPropsJson(id));
			} else {
				// Merge key by key; entrySet avoids a second lookup per key.
				Map<String, Object> propMap = getPropMap(from, id);
				for (Map.Entry<String, Object> entry : propMap.entrySet()) {
					setProp(to, id, entry.getKey(), entry.getValue());
				}
			}
		}
	}

	/**
	 * Resolves a string property with override semantics: the tracker's
	 * non-empty value wins, then the user's non-empty value, then the default.
	 *
	 * @param group property group name
	 * @param key property key
	 * @param tracker highest-priority source (may be null)
	 * @param user fallback source
	 * @param defaultValue returned when neither source has a non-empty value
	 * @return the resolved property value
	 */
	public String getStringPropOverride(
			String group,
			String key,
			HasProp tracker,
			HasProp user,
			String defaultValue) {
		String value = defaultValue;
		String trackerValue = null;
		if (tracker != null) {
			trackerValue = getStringProp(tracker, group, key);
		}
		if (trackerValue != null && !trackerValue.isEmpty()) {
			value = trackerValue;
		} else {
			String userValue = getStringProp(user, group, key);
			if (userValue != null && !userValue.isEmpty()) {
				value = userValue;
			}
		}
		return value;
	}
}
package edu.mit.streamjit.impl.common;

import static com.google.common.base.Preconditions.checkNotNull;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;

import edu.mit.streamjit.api.CompiledStream;
import edu.mit.streamjit.api.StreamCompilationFailedException;
import edu.mit.streamjit.api.Worker;
import edu.mit.streamjit.impl.blob.Blob;
import edu.mit.streamjit.impl.blob.Blob.Token;
import edu.mit.streamjit.impl.concurrent.ConcurrentStreamCompiler;
import edu.mit.streamjit.impl.distributed.DistributedStreamCompiler;

/**
 * Abstract drainer performs draining of a stream application. Both
 * {@link DistributedStreamCompiler} and {@link ConcurrentStreamCompiler} may
 * extend this to implement draining in their particular context. Works
 * coupled with {@link BlobNode} and {@link BlobGraph}: draining is propagated
 * through the blob graph in topological order, each blob draining only after
 * all of its predecessors have drained.
 *
 * @author Sumanan sumanan@mit.edu
 * @since Jul 30, 2013
 */
public abstract class AbstractDrainer {
	/**
	 * Blob graph of the stream application that needs to be drained.
	 */
	protected BlobGraph blobGraph;
	// Released (counted down to 0) once every blob in the graph has drained;
	// isDrained()/awaitDrained() observe this latch.
	private CountDownLatch latch;
	// Number of blobs that have not yet finished draining. Decremented from
	// blob callback threads, hence atomic.
	private AtomicInteger unDrainedNodes;
	/**
	 * State of the drainer.
	 */
	private DrainerState state;

	public AbstractDrainer() {
		state = DrainerState.NODRAINING;
	}

	/**
	 * Sets the blobGraph that is in execution. When
	 * {@link #startDraining(boolean)} is called, abstract drainer will traverse
	 * through the blobgraph and drain the stream application.
	 *
	 * @param blobGraph graph of the currently executing blobs
	 */
	public final void setBlobGraph(BlobGraph blobGraph) {
		if (state == DrainerState.NODRAINING) {
			this.blobGraph = blobGraph;
			unDrainedNodes = new AtomicInteger(blobGraph.getBlobIds().size());
			latch = new CountDownLatch(1);
			blobGraph.setDrainer(this);
		} else {
			// NOTE(review): "draing" typo in the message — left as-is
			// (documentation-only pass; message text is runtime behavior).
			throw new RuntimeException("Drainer is in draing mode.");
		}
	}

	/**
	 * Initiate the draining of the blobgraph, starting from the source blob.
	 *
	 * @param isFinal true for a final drain (input exhausted), false for an
	 *                intermediate drain (e.g. reconfiguration)
	 */
	public final void startDraining(boolean isFinal) {
		if (state == DrainerState.NODRAINING) {
			if (isFinal)
				this.state = DrainerState.FINAL;
			else
				this.state = DrainerState.INTERMEDIATE;
			blobGraph.getSourceBlobNode().drain();
		} else {
			throw new RuntimeException("Drainer is in draing mode.");
		}
	}

	/**
	 * Once draining of a blob is done, it has to inform the drainer by
	 * calling this method.
	 *
	 * @param blobID identifier of the blob that finished draining
	 */
	public final void drained(Token blobID) {
		blobGraph.getBlobNode(blobID).drained();
	}

	/**
	 * @return true iff draining of the stream application is finished. See
	 *         {@link CompiledStream#isDrained()} for more details.
	 */
	public final boolean isDrained() {
		return latch.getCount() == 0;
	}

	/**
	 * Blocks until the whole application has drained.
	 * See {@link CompiledStream#awaitDrained()} for more details.
	 */
	public final void awaitDrained() throws InterruptedException {
		latch.await();
	}

	/**
	 * Blocks until the whole application has drained or the timeout elapses.
	 * See {@link CompiledStream#awaitDrained(long, TimeUnit)} for more details.
	 */
	public final void awaitDrained(long timeout, TimeUnit unit)
			throws InterruptedException, TimeoutException {
		latch.await(timeout, unit);
	}

	/**
	 * Once all of a {@link BlobNode}'s preconditions are satisfied for
	 * draining, the blob node will call this function to drain the blob.
	 *
	 * @param blobID  identifier of the blob to drain
	 * @param isFinal true when this is the final drain of the application
	 */
	protected abstract void drain(Token blobID, boolean isFinal);

	/**
	 * {@link AbstractDrainer} will call this function after the corresponding
	 * blob is drained. Sub classes may implement blob related resource cleanup
	 * jobs here (e.g., stop blob threads).
	 *
	 * @param blobID identifier of the drained blob
	 */
	protected abstract void drainingDone(Token blobID);

	/**
	 * {@link AbstractDrainer} will call this function after the draining
	 * process is complete. This can be used to do the final cleanups (e.g, all
	 * data in the tail buffer should be consumed before this function returns).
	 * After the return of this function, isDrained() will start to return true
	 * and any threads waiting at awaitDrained() will be released.
	 */
	protected abstract void drainingDone();

	/**
	 * {@link BlobNode}s have to call this function to inform of a
	 * draining-done event. When the last undrained node reports in, the
	 * drainer runs final cleanup, resets its state and releases the latch.
	 *
	 * @param blobNode the node whose blob just finished draining
	 */
	private void drainingDone(BlobNode blobNode) {
		assert state != DrainerState.NODRAINING : "Illegal call. Drainer is not in draining mode.";
		drainingDone(blobNode.blobID);
		if (unDrainedNodes.decrementAndGet() == 0) {
			// Order matters: subclass cleanup completes before the state is
			// reset and waiting threads are released.
			drainingDone();
			state = DrainerState.NODRAINING;
			latch.countDown();
		}
	}

	/**
	 * BlobGraph builds predecessor/successor relationships for a set of
	 * partitioned workers, and verifies for cyclic dependencies among the
	 * partitions. Blob graph doesn't keep blobs. Instead it keeps
	 * {@link BlobNode}s that represent blobs. All BlobNodes in the graph
	 * can be retrieved and used coupled with {@link AbstractDrainer} to
	 * successfully perform the draining process.
	 *
	 * @author Sumanan sumanan@mit.edu
	 * @since Jul 30, 2013
	 */
	public static class BlobGraph {
		/**
		 * All nodes in the graph, keyed by blob identifier.
		 */
		private final ImmutableMap<Token, BlobNode> blobNodes;
		/**
		 * The blob which has the overall stream input (the unique node with
		 * no predecessors).
		 */
		private final BlobNode sourceBlobNode;

		public BlobGraph(List<Set<Worker<?, ?>>> partitionWorkers) {
			checkNotNull(partitionWorkers);
			// Wrap each worker partition so its external input/output tokens
			// can be compared below.
			Set<DummyBlob> blobSet = new HashSet<>();
			for (Set<Worker<?, ?>> workers : partitionWorkers) {
				blobSet.add(new DummyBlob(workers));
			}
			ImmutableMap.Builder<Token, BlobNode> builder = new ImmutableMap.Builder<>();
			for (DummyBlob b : blobSet) {
				builder.put(b.id, new BlobNode(b.id));
			}
			this.blobNodes = builder.build();
			// An edge cur -> other exists iff some output token of cur is an
			// input token of other.
			for (DummyBlob cur : blobSet) {
				for (DummyBlob other : blobSet) {
					if (cur == other)
						continue;
					if (Sets.intersection(cur.outputs, other.inputs).size() != 0) {
						BlobNode curNode = blobNodes.get(cur.id);
						BlobNode otherNode = blobNodes.get(other.id);
						curNode.addSuccessor(otherNode);
						otherNode.addPredecessor(curNode);
					}
				}
			}
			checkCycles(blobNodes.values());
			// The source is the single node with zero predecessors.
			BlobNode sourceBlob = null;
			for (BlobNode bn : blobNodes.values()) {
				if (bn.getDependencyCount() == 0) {
					assert sourceBlob == null : "Multiple independent blobs found.";
					sourceBlob = bn;
				}
			}
			checkNotNull(sourceBlob);
			this.sourceBlobNode = sourceBlob;
		}

		/**
		 * @return BlobIds of all blobnodes in the blobgraph.
		 */
		public ImmutableSet<Token> getBlobIds() {
			return blobNodes.keySet();
		}

		public BlobNode getBlobNode(Token blobID) {
			return blobNodes.get(blobID);
		}

		/**
		 * A Drainer can be set on the {@link BlobGraph} to perform draining;
		 * the drainer is propagated to every node.
		 *
		 * @param drainer drainer responsible for this graph
		 */
		public void setDrainer(AbstractDrainer drainer) {
			for (BlobNode bn : blobNodes.values()) {
				bn.setDrainer(drainer);
			}
		}

		/**
		 * @return the sourceBlobNode
		 */
		private BlobNode getSourceBlobNode() {
			return sourceBlobNode;
		}

		/**
		 * Does a depth first traversal to detect cycles in the graph.
		 *
		 * @param blobNodes all nodes of the graph
		 */
		private void checkCycles(Collection<BlobNode> blobNodes) {
			Map<BlobNode, Color> colorMap = new HashMap<>();
			for (BlobNode b : blobNodes) {
				colorMap.put(b, Color.WHITE);
			}
			for (BlobNode b : blobNodes) {
				if (colorMap.get(b) == Color.WHITE)
					if (DFS(b, colorMap))
						throw new StreamCompilationFailedException(
								"Cycles found among blobs");
			}
		}

		/**
		 * A cycle exists in a directed graph if a back edge is detected during
		 * a DFS traversal. A back edge exists in a directed graph if the
		 * currently explored vertex has an adjacent vertex that was already
		 * colored gray.
		 *
		 * @param vertex   vertex currently being explored
		 * @param colorMap per-vertex DFS state (WHITE=unvisited, GRAY=on
		 *                 stack, BLACK=done)
		 * @return <code>true</code> if cycle found, <code>false</code>
		 *         otherwise.
		 */
		private boolean DFS(BlobNode vertex, Map<BlobNode, Color> colorMap) {
			colorMap.put(vertex, Color.GRAY);
			for (BlobNode adj : vertex.getSuccessors()) {
				if (colorMap.get(adj) == Color.GRAY)
					return true;
				if (colorMap.get(adj) == Color.WHITE)
					if (DFS(adj, colorMap))
						return true;
			}
			colorMap.put(vertex, Color.BLACK);
			return false;
		}

		/**
		 * Just used to build the input and output tokens of a partitioned
		 * blob's workers; imitates a {@link Blob}.
		 */
		private final class DummyBlob {
			private final ImmutableSet<Token> inputs;
			private final ImmutableSet<Token> outputs;
			// Identified by the minimum input token (stable, unique per blob).
			private final Token id;

			private DummyBlob(Set<Worker<?, ?>> workers) {
				ImmutableSet.Builder<Token> inputBuilder = new ImmutableSet.Builder<>();
				ImmutableSet.Builder<Token> outputBuilder = new ImmutableSet.Builder<>();
				for (IOInfo info : IOInfo.externalEdges(workers)) {
					(info.isInput() ? inputBuilder : outputBuilder).add(info
							.token());
				}
				inputs = inputBuilder.build();
				outputs = outputBuilder.build();
				id = Collections.min(inputs);
			}
		}
	}

	/**
	 * BlobNode represents a vertex in the blob graph ({@link BlobGraph}). It
	 * represents a {@link Blob} and carries out the draining process of that
	 * blob.
	 *
	 * @author Sumanan
	 */
	private static final class BlobNode {
		private AbstractDrainer drainer;
		/**
		 * Identifier of the blob wrapped by this blob node.
		 */
		private Token blobID;
		/**
		 * Predecessor blob nodes of this blob node.
		 */
		private List<BlobNode> predecessors;
		/**
		 * Successor blob nodes of this blob node.
		 */
		private List<BlobNode> successors;
		/**
		 * The number of undrained predecessors of this blob. Every time a
		 * predecessor finishes draining, dependencyCount will be decremented
		 * and once it reaches 0 this blob will be called for draining.
		 */
		private AtomicInteger dependencyCount;
		/**
		 * Set to true iff this blob has been drained.
		 */
		private volatile boolean isDrained;

		private BlobNode(Token blob) {
			this.blobID = blob;
			predecessors = new ArrayList<>();
			successors = new ArrayList<>();
			dependencyCount = new AtomicInteger(0);
			isDrained = false;
		}

		/**
		 * Should be called when the draining of the current blob has been
		 * finished. Informs all successors (possibly triggering their drain)
		 * and then reports completion to the drainer.
		 */
		private void drained() {
			isDrained = true;
			for (BlobNode suc : this.successors) {
				suc.predecessorDrained(this);
			}
			drainer.drainingDone(this);
		}

		/**
		 * Drain the blob mapped by this blob node.
		 */
		private void drain() {
			checkNotNull(drainer);
			drainer.drain(blobID, drainer.state == DrainerState.FINAL);
		}

		private ImmutableList<BlobNode> getSuccessors() {
			return ImmutableList.copyOf(successors);
		}

		private void addPredecessor(BlobNode pred) {
			assert !predecessors.contains(pred) : String.format(
					"The BlobNode %s has already been set as a predecessors",
					pred);
			predecessors.add(pred);
			// NOTE(review): get()+set() is not an atomic increment; appears
			// safe only because graph construction is single-threaded —
			// verify before reusing concurrently.
			dependencyCount.set(dependencyCount.get() + 1);
		}

		private void addSuccessor(BlobNode succ) {
			assert !successors.contains(succ) : String
					.format("The BlobNode %s has already been set as a successor",
							succ);
			successors.add(succ);
		}

		// Called by a predecessor when it has finished draining; when the
		// last predecessor reports in, this blob starts draining.
		private void predecessorDrained(BlobNode pred) {
			if (!predecessors.contains(pred))
				throw new IllegalArgumentException("Illegal Predecessor");
			assert dependencyCount.get() > 0 : String
					.format("Graph mismatch : My predecessors count is %d. But more than %d of BlobNodes claim me as their successor",
							predecessors.size(), predecessors.size());
			if (dependencyCount.decrementAndGet() == 0) {
				drain();
			}
		}

		/**
		 * @return The number of undrained predecessors.
		 */
		private int getDependencyCount() {
			return dependencyCount.get();
		}

		private void setDrainer(AbstractDrainer drainer) {
			checkNotNull(drainer);
			this.drainer = drainer;
		}
	}

	/**
	 * Color enumerator used by the DFS algorithm to find cycles in the blob
	 * graph.
	 */
	private enum Color {
		WHITE, GRAY, BLACK
	}

	/**
	 * Reflects {@link AbstractDrainer}'s state.
	 */
	private enum DrainerState {
		/** Not currently draining. */
		NODRAINING,
		/**
		 * Draining in the middle of the stream graph's execution. This type of
		 * draining will be triggered by the open tuner for reconfiguration.
		 * Drained data of all blobs are expected in this case.
		 */
		INTERMEDIATE,
		/**
		 * This type of draining will take place when the input stream runs
		 * out. No drained data expected as all blobs are expected to execute
		 * until all input buffers become empty.
		 */
		FINAL
	}
}
package jadx.core.codegen;

import jadx.core.Consts;
import jadx.core.dex.attributes.AType;
import jadx.core.dex.attributes.IAttributeNode;
import jadx.core.dex.attributes.annotations.Annotation;
import jadx.core.dex.attributes.annotations.AnnotationsList;
import jadx.core.dex.attributes.annotations.MethodParameters;
import jadx.core.dex.info.FieldInfo;
import jadx.core.dex.instructions.args.ArgType;
import jadx.core.dex.nodes.ClassNode;
import jadx.core.dex.nodes.FieldNode;
import jadx.core.dex.nodes.MethodNode;
import jadx.core.utils.StringUtils;
import jadx.core.utils.exceptions.JadxRuntimeException;

import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

/**
 * Emits Java annotation syntax ({@code @Foo(...)}) into generated code for a
 * class, its methods, fields and method parameters, decoding the annotation
 * values stored in the dex attributes.
 */
public class AnnotationGen {

	private final ClassNode cls;
	private final ClassGen classGen;

	public AnnotationGen(ClassNode cls, ClassGen classGen) {
		this.cls = cls;
		this.classGen = classGen;
	}

	/** Writes all annotations attached to the class itself. */
	public void addForClass(CodeWriter code) {
		add(cls, code);
	}

	/** Writes all annotations attached to the given method. */
	public void addForMethod(CodeWriter code, MethodNode mth) {
		add(mth, code);
	}

	/** Writes all annotations attached to the given field. */
	public void addForField(CodeWriter code, FieldNode field) {
		add(field, code);
	}

	/**
	 * Writes the annotations of the n-th method parameter (inline, followed
	 * by a space each). Silently does nothing when the parameter has none.
	 */
	public void addForParameter(CodeWriter code, MethodParameters paramsAnnotations, int n) {
		List<AnnotationsList> paramList = paramsAnnotations.getParamList();
		if (n >= paramList.size()) {
			return;
		}
		AnnotationsList aList = paramList.get(n);
		if (aList == null || aList.isEmpty()) {
			return;
		}
		for (Annotation a : aList.getAll()) {
			formatAnnotation(code, a);
			code.add(' ');
		}
	}

	/**
	 * Writes every annotation of the node, each on its own line. Internal
	 * dalvik annotations are skipped (emitted as comments in debug mode).
	 */
	private void add(IAttributeNode node, CodeWriter code) {
		AnnotationsList aList = node.get(AType.ANNOTATION_LIST);
		if (aList == null || aList.isEmpty()) {
			return;
		}
		for (Annotation a : aList.getAll()) {
			String aCls = a.getAnnotationClass();
			if (aCls.startsWith(Consts.DALVIK_ANNOTATION_PKG)) {
				// skip
				if (Consts.DEBUG) {
					// FIX: this string literal was truncated/corrupted in this
					// copy of the file (unbalanced quote); restored to emit the
					// skipped annotation as a line comment, matching upstream.
					code.startLine("// " + a);
				}
			} else {
				code.startLine();
				formatAnnotation(code, a);
			}
		}
	}

	/**
	 * Writes one annotation: {@code @Type}, plus {@code (...)} when it has
	 * values. A lone "value" element is emitted without its name.
	 */
	private void formatAnnotation(CodeWriter code, Annotation a) {
		code.add('@');
		classGen.useType(code, a.getType());
		Map<String, Object> vl = a.getValues();
		if (!vl.isEmpty()) {
			code.add('(');
			if (vl.size() == 1 && vl.containsKey("value")) {
				encodeValue(code, vl.get("value"));
			} else {
				for (Iterator<Entry<String, Object>> it = vl.entrySet().iterator(); it.hasNext(); ) {
					Entry<String, Object> e = it.next();
					code.add(e.getKey());
					code.add(" = ");
					encodeValue(code, e.getValue());
					if (it.hasNext()) {
						code.add(", ");
					}
				}
			}
			code.add(')');
		}
	}

	/**
	 * Writes the {@code throws} clause of a method from its dalvik "throws"
	 * annotation, if present.
	 */
	@SuppressWarnings("unchecked")
	public void addThrows(MethodNode mth, CodeWriter code) {
		Annotation an = mth.getAnnotation(Consts.DALVIK_THROWS);
		if (an != null) {
			Object exs = an.getDefaultValue();
			code.add(" throws ");
			for (Iterator<ArgType> it = ((List<ArgType>) exs).iterator(); it.hasNext(); ) {
				ArgType ex = it.next();
				classGen.useType(code, ex);
				if (it.hasNext()) {
					code.add(", ");
				}
			}
		}
	}

	/**
	 * Looks up the default value of an annotation element declared on this
	 * class, or null when the class declares no defaults.
	 */
	public Object getAnnotationDefaultValue(String name) {
		Annotation an = cls.getAnnotation(Consts.DALVIK_ANNOTATION_DEFAULT);
		if (an != null) {
			Annotation defAnnotation = (Annotation) an.getDefaultValue();
			return defAnnotation.getValues().get(name);
		}
		return null;
	}

	// TODO: refactor this boilerplate code
	/**
	 * Writes a single annotation value in Java source form; dispatches on the
	 * runtime type of {@code val} (primitives, strings, classes, static
	 * fields, arrays/iterables and nested annotations).
	 *
	 * @throws JadxRuntimeException for value types it cannot encode
	 */
	public void encodeValue(CodeWriter code, Object val) {
		if (val == null) {
			code.add("null");
			return;
		}
		if (val instanceof String) {
			code.add(StringUtils.unescapeString((String) val));
		} else if (val instanceof Integer) {
			code.add(TypeGen.formatInteger((Integer) val));
		} else if (val instanceof Character) {
			code.add(StringUtils.unescapeChar((Character) val));
		} else if (val instanceof Boolean) {
			code.add(Boolean.TRUE.equals(val) ? "true" : "false");
		} else if (val instanceof Float) {
			code.add(TypeGen.formatFloat((Float) val));
		} else if (val instanceof Double) {
			code.add(TypeGen.formatDouble((Double) val));
		} else if (val instanceof Long) {
			code.add(TypeGen.formatLong((Long) val));
		} else if (val instanceof Short) {
			code.add(TypeGen.formatShort((Short) val));
		} else if (val instanceof Byte) {
			code.add(TypeGen.formatByte((Byte) val));
		} else if (val instanceof ArgType) {
			classGen.useType(code, (ArgType) val);
			code.add(".class");
		} else if (val instanceof FieldInfo) {
			// must be a static field
			FieldInfo field = (FieldInfo) val;
			InsnGen.makeStaticFieldAccess(code, field, classGen);
		} else if (val instanceof Iterable) {
			code.add('{');
			// wildcard instead of raw type: removes the unchecked warning
			Iterator<?> it = ((Iterable<?>) val).iterator();
			while (it.hasNext()) {
				Object obj = it.next();
				encodeValue(code, obj);
				if (it.hasNext()) {
					code.add(", ");
				}
			}
			code.add('}');
		} else if (val instanceof Annotation) {
			formatAnnotation(code, (Annotation) val);
		} else {
			// TODO: also can be method values
			throw new JadxRuntimeException("Can't decode value: " + val + " (" + val.getClass() + ")");
		}
	}
}
package edu.wheaton.simulator.gui;

import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.Map;
import javax.swing.JFileChooser;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JOptionPane;
import com.google.common.collect.ImmutableMap;
import net.sourceforge.jeval.EvaluationException;
import edu.wheaton.simulator.datastructure.Field;
import edu.wheaton.simulator.datastructure.Grid;
import edu.wheaton.simulator.entity.Agent;
import edu.wheaton.simulator.gui.screen.EditEntityScreen;
import edu.wheaton.simulator.gui.screen.EditFieldScreen;
import edu.wheaton.simulator.gui.screen.NewSimulationScreen;
import edu.wheaton.simulator.gui.screen.SetupScreen;
import edu.wheaton.simulator.gui.screen.StatDisplayScreen;
import edu.wheaton.simulator.gui.screen.TitleScreen;
import edu.wheaton.simulator.gui.screen.ViewSimScreen;
import edu.wheaton.simulator.simulation.Simulator;
import edu.wheaton.simulator.simulation.end.SimulationEnder;
import edu.wheaton.simulator.statistics.Loader;
import edu.wheaton.simulator.statistics.Saver;
import edu.wheaton.simulator.statistics.StatisticsManager;

/**
 * Central GUI controller: wires the Swing screens, menu bar and grid panel to
 * the singleton {@link Simulator}, and exposes thin delegating accessors for
 * simulation state (grid size, end conditions, update strategy, run state).
 */
public class SimulatorGuiManager {

	// Screen/navigation manager owning the Display.
	private ScreenManager sm;
	// End conditions (step/population limits) handed to the Simulator.
	private SimulationEnder se;
	private StatisticsManager statMan;
	// Singleton simulator instance (refreshed in initSim()).
	private Simulator simulator;
	private boolean simulationIsRunning;
	// True only while the simulation is paused/stopped; guards agent spawning.
	private boolean canSpawn;
	private GridPanel gridPanel;
	private GridPanelObserver gpo;
	private Loader loader;
	// NOTE(review): 'saver' is never assigned or used in this class — dead
	// field pending the statistics team's save API (see saveSim()).
	private Saver saver;
	private boolean hasStarted;
	private JFileChooser fc;

	/**
	 * Builds all screens and the menu bar for the given display.
	 *
	 * NOTE(review): initSim(...) runs before 'se' and 'gpo' are assigned, so
	 * the first Simulator.load(...) receives a null SimulationEnder and
	 * addGridObserver(gpo) a null observer — verify this ordering is
	 * intentional.
	 */
	public SimulatorGuiManager(Display d) {
		canSpawn = true;
		initSim("New Simulation",10, 10);
		gridPanel = new GridPanel(this);
		sm = new ScreenManager(d);
		sm.putScreen("Title", new TitleScreen(this));
		sm.putScreen("New Simulation", new NewSimulationScreen(this));
		sm.putScreen("Edit Fields", new EditFieldScreen(this));
		sm.putScreen("Edit Entities", new EditEntityScreen(this));
		sm.putScreen("View Simulation", new ViewSimScreen(this));
		sm.putScreen("Statistics", new StatDisplayScreen(this));
		sm.putScreen("Grid Setup", new SetupScreen(this));
		sm.getDisplay().setJMenuBar(makeMenuBar());
		se = new SimulationEnder();
		loader = new Loader();
		statMan = StatisticsManager.getInstance();
		hasStarted = false;
		gpo = new GridPanelObserver(gridPanel);
		fc = new JFileChooser();
	}

	/** Convenience constructor creating its own Display. */
	public SimulatorGuiManager(){
		this(new Display());
	}

	public ScreenManager getScreenManager(){
		return sm;
	}

	public GridPanel getGridPanel(){
		return gridPanel;
	}

	/**
	 * (Re)initializes the simulator with a fresh name and grid size and
	 * re-attaches the grid observer and panel.
	 */
	public void initSim(String name,int x, int y) {
		System.out.println("Reset prototypes");
		simulator = Simulator.getInstance();
		simulator.load(name, x,y,se);
		simulator.addGridObserver(gpo);
		// gridPanel is null on the very first call (from the constructor).
		if(gridPanel != null)
			// NOTE(review): getSimGrid() is not visible in this copy of the
			// file — presumably defined elsewhere in this class.
			gridPanel.setGrid(getSimGrid());
	}

	private Simulator getSim() {
		return simulator;
	}

	// --- global-field delegation -------------------------------------------
	public Field getSimGlobalField(String name){
		return getSim().getGlobalField(name);
	}

	public void addSimGlobalField(String name, String value){
		getSim().addGlobalField(name, value);
	}

	public void removeSimGlobalField(String name){
		getSim().removeGlobalField(name);
	}

	private SimulationEnder getSimEnder() {
		return se;
	}

	// --- end-condition delegation ------------------------------------------
	public void setSimStepLimit(int maxSteps){
		getSimEnder().setStepLimit(maxSteps);
	}

	public Integer getSimStepLimit(){
		return getSimEnder().getStepLimit();
	}

	public void setSimPopLimit(String typeName, int maxPop){
		getSimEnder().setPopLimit(typeName, maxPop);
	}

	public ImmutableMap<String, Integer> getSimPopLimits(){
		return getSimEnder().getPopLimits();
	}

	public void removeSimPopLimit(String typeName){
		getSimEnder().removePopLimit(typeName);
	}

	public StatisticsManager getStatManager(){
		return statMan;
	}

	public String getSimName(){
		return getSim().getName();
	}

	/** Renames the simulation and resizes its grid in one call. */
	public void updateGuiManager(String nos, int width, int height){
		getSim().setName(nos);
		resizeSimGrid(width, height);
	}

	// --- run-state flags ----------------------------------------------------
	public boolean isSimRunning() {
		return simulationIsRunning;
	}

	public void setSimRunning(boolean b) {
		simulationIsRunning = b;
	}

	public void setSimStarted(boolean b) {
		hasStarted = b;
	}

	public boolean hasSimStarted() {
		return hasStarted;
	}

	// --- grid delegation ----------------------------------------------------
	public Integer getSimGridHeight(){
		return getSim().getGrid().getHeight();
	}

	public void resizeSimGrid(int width,int height){
		getSim().resizeGrid(width, height);
	}

	public Integer getSimGridWidth(){
		return getSim().getGrid().getWidth();
	}

	public void setSimLayerExtremes() throws EvaluationException{
		getSim().setLayerExtremes();
	}

	public Agent getSimAgent(int x, int y){
		return getSim().getAgent(x, y);
	}

	public void removeSimAgent(int x, int y){
		getSim().removeAgent(x, y);
	}

	// --- sample-simulation bootstrap ---------------------------------------
	public void initSampleSims(){
		getSim().initSamples();
	}

	public void initGameOfLifeSim(){
		getSim().initGameOfLife();
	}

	public void initRockPaperScissorsSim(){
		getSim().initRockPaperScissors();
	}

	// --- update-strategy delegation ----------------------------------------
	public void setSimLinearUpdate(){
		getSim().setLinearUpdate();
	}

	public void setSimAtomicUpdate(){
		getSim().setAtomicUpdate();
	}

	public void setSimPriorityUpdate(int a, int b){
		getSim().setPriorityUpdate(a, b);
	}

	public String getCurrentSimUpdater(){
		return getSim().currentUpdater();
	}

	/** Pauses the simulation and re-enables agent spawning. */
	public void pauseSim(){
		setSimRunning(false);
		canSpawn = true;
		simulator.pause();
	}

	public boolean canSimSpawn() {
		return canSpawn;
	}

	public boolean addAgent(String prototypeName, int x, int y){
		return getSim().addAgent(prototypeName, x, y);
	}

	/**
	 * Opens a file chooser for loading a saved simulation. The actual load is
	 * not implemented yet (see TODOs); only the file name is captured.
	 */
	public void loadSim() {
		int returnVal = fc.showOpenDialog(null);
		String fileName = "";
		if (returnVal == JFileChooser.APPROVE_OPTION) {
			fileName = fc.getSelectedFile().getName();
			//TODO make new simulator
			//initSim(fileName, x, y);
			//this should eventually be statMan.loadSim(fileName), once that actually gets written
			//loader.loadSimulation(fileName);
		}
	}

	/** Placeholder: saving is not implemented yet (see TODO). */
	public void saveSim(String fileName) {
		//TODO get statistics team to provide a 'saveSim(String fileName)' method
		//statMan.saveSimulation(fileName);
	}

	/** Starts the simulation; spawning is disabled while running. */
	public void startSim(){
		setSimRunning(true);
		setSimStarted(true);
		canSpawn = false;
		simulator.play();
	}

	// Builds the application menu bar (File + Help; Edit menu disabled).
	private JMenuBar makeMenuBar() {
		JMenuBar menuBar = new JMenuBar();
		JMenu fileMenu = makeFileMenu(this);
		//JMenu editMenu = makeEditMenu(sm);
		JMenu helpMenu = makeHelpMenu(sm);
		menuBar.add(fileMenu);
		//menuBar.add(editMenu);
		menuBar.add(helpMenu);
		return menuBar;
	}

	// File menu: New / Save / Load / Exit.
	private JMenu makeFileMenu(final SimulatorGuiManager guiManager) {
		JMenu menu = Gui.makeMenu("File");
		menu.add(Gui.makeMenuItem("New Simulation",
				new GeneralButtonListener("New Simulation",guiManager.sm)));
		menu.add(Gui.makeMenuItem("Save Simulation", new ActionListener() {
			@Override
			public void actionPerformed(ActionEvent arg0) {
				String fileName = JOptionPane.showInputDialog("Please enter file name: ");
				saveSim(fileName);
			}
		}
				));
		menu.add(Gui.makeMenuItem("Load Simulation", new ActionListener() {
			@Override
			public void actionPerformed(ActionEvent arg0) {
				loadSim();
			}
		}
				));
		menu.add(Gui.makeMenuItem("Exit",new ActionListener(){
			@Override
			public void actionPerformed(ActionEvent e) {
				guiManager.setSimRunning(false);
				System.exit(0);
			}
		}));
		return menu;
	}

	// NOTE(review): currently unused — its only call site in makeMenuBar() is
	// commented out.
	private JMenu makeEditMenu(final ScreenManager sm) {
		JMenu menu = Gui.makeMenu("Edit");
		menu.add(Gui.makeMenuItem("Edit Global Fields",
				new GeneralButtonListener("Fields",sm)));
		return menu;
	}

	// Help menu: About / Help Contents dialogs.
	private static JMenu makeHelpMenu(final ScreenManager sm) {
		JMenu menu = Gui.makeMenu("Help");
		menu.add(Gui.makeMenuItem("About",new ActionListener(){
			@Override
			public void actionPerformed(ActionEvent e) {
				JOptionPane.showMessageDialog(sm.getDisplay(),
						"Wheaton College. Software Development 2013.",
						"About",JOptionPane.PLAIN_MESSAGE);
			}
		}));
		menu.add(Gui.makeMenuItem("Help Contents",new ActionListener(){
			@Override
			public void actionPerformed(ActionEvent e) {
				JOptionPane.showMessageDialog(sm.getDisplay(),
						"Wheaton College. Software Development 2013.\n Help Contents",
						"Help Contents",JOptionPane.PLAIN_MESSAGE);
			}
		}));
		return menu;
	}

	public void setSimName(String name) {
		getSim().setName(name);
	}

	public Integer getSleepPeriod() {
		return getSim().getSleepPeriod();
	}

	public void setSleepPeriod(int n) {
		getSim().setSleepPeriod(n);
	}
}
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* the project. */
package edu.wpi.first.wpilibj.templates;

import edu.wpi.first.wpilibj.IterativeRobot;
// NOTE(review): Jaguar is imported but not referenced in this class —
// presumably used via the robotHardware classes; verify before removing.
import edu.wpi.first.wpilibj.Jaguar;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.Compressor;
import robotHardware.Drivetrain;
import robotHardware.RampArm;
import robotHardware.RobotMap;
import robotHardware.Shooter;

/**
 * The VM is configured to automatically run this class, and to call the
 * functions corresponding to each mode, as described in the IterativeRobot
 * documentation. If you change the name of this class or the package after
 * creating this project, you must also update the manifest file in the resource
 * directory.
 *
 * Teleop drive scheme: tank drive (one joystick per side) with acceleration
 * limiting, left/right power equalization, FP-motor assist above a power
 * threshold, and operator-controlled shooter/feeder/intake/ramp-arm.
 */
public class RobotTemplate extends IterativeRobot {

	/**
	 * This function is run when the robot is first started up and should be
	 * used for any initialization code.
	 */
	/*
	 * the value used to equalize the drives if they are moving in the same direction
	 * must be between 0 and 1
	 * lower values will make the robot veer more easily
	 * NOTE(review): only referenced from the commented-out equalization block
	 * in drive() — currently unused.
	 */
	public static double EQUALIZATION_FACTOR = 0.2;
	/*
	 * if the difference b/w the motor values after the equalization divisor is applied, they are set to be equal.
	 * must be between 0 and 2
	 * higher values will make the robot drive straighter
	 */
	public static double EQUALIZATION_THRESHOLD = 0.1;
	/*
	 * slows down acceleration if input changes faster than this
	 * must be between 0 and 2
	 * higher values will make the robot limit acceleration more
	 */
	public static double ACCELERATION_THRESHOLD = 0.03;
	/*
	 * slows down acceleration by this factor if input acceleration exceeds the limit above
	 * must be between 0 and 1
	 * lower values will make the robot acclerate slower when limiting is in effect.
	 * NOTE(review): currently unused — the limiter in drive() clamps by
	 * ACCELERATION_THRESHOLD instead of scaling by this factor.
	 */
	public static double ACCELERATION_FACTOR = 0.7;
	// Minimum |power| at which the FP assist motors engage.
	public static double FP_ACTIVATION_THRESHOLD = 0.7;

	private Joystick driverLeft;
	private Joystick driverRight;
	private Joystick operator;
	// Paces the drive loop: drive() runs at most every 0.05 s.
	private Timer timer;
	private Compressor compressor = new Compressor(RobotMap.pressureSwitch, RobotMap.compressor);
	/*
	 * the values curently being sent to the jaguars
	 */
	private double lastLeftInput = 0;
	private double lastRightInput = 0;

	// Joystick port assignments: left drive = 2, right drive = 1, operator = 3.
	public void robotInit() {
		driverLeft = new Joystick(2);
		driverRight = new Joystick(1);
		operator = new Joystick(3);
		timer = new Timer();
	}

	/**
	 * This function is called periodically during autonomous
	 */
	public void autonomousPeriodic() {
	}

	public void teleopInit() {
		timer.start();
		compressor.start();
	}

	/**
	 * This function is called periodically during operator control
	 */
	public void teleopPeriodic() {
		// Rate-limit driving to a 50 ms cadence.
		if (timer.get() > 0.05) {
			timer.reset();
			timer.start();
			drive(driverLeft.getY(), driverRight.getY());
		}
		// Operator button 6: run shooter at full power.
		if (operator.getRawButton(6)) {
			Shooter.runShooter(1);
		} else {
			Shooter.stopShooter();
		}
		// Buttons 7/5: feeder forward/reverse; otherwise stopped.
		if(operator.getRawButton(7)) {
			Shooter.startFeeder(1);
		} else if (operator.getRawButton(5)) {
			Shooter.startFeeder(-1);
		} else {
			Shooter.startFeeder(0);
		}
		// Intake speed follows operator axis 3 directly.
		Shooter.startIntake(operator.getRawAxis(3));
	}

	public void disabledInit() {
		compressor.stop();
		timer.stop();
		timer.reset();
	}

	// Applies acceleration limiting and equalization to the two joystick
	// inputs, then drives the FP motors, CIMs, puck and ramp arm.
	private void drive(double leftJoystick, double rightJoystick) {
		double leftPower = leftJoystick;
		double rightPower = rightJoystick;
		/*
		 * slows down acceleration if joystick input changes too quickly
		 */
		// NOTE(review): this outer guard (left crossing from <=0 to >0) does
		// not match the stated intent of limiting all rapid input changes —
		// looks like a leftover edit; confirm against the intended behavior.
		if(leftPower > 0 && lastLeftInput <=0) {
			// Only limit when both sides are commanded in the same direction.
			if (leftPower > 0 == rightPower > 0) {
				// Clamp each side's change per tick to ACCELERATION_THRESHOLD.
				if(Math.abs(leftPower - lastLeftInput) > ACCELERATION_THRESHOLD) {
					if(leftPower < lastLeftInput) {
						leftPower = lastLeftInput - ACCELERATION_THRESHOLD;
					} else if (leftPower > lastLeftInput) {
						leftPower = lastLeftInput + ACCELERATION_THRESHOLD;
					}
				}
				if(Math.abs(rightPower - lastRightInput) > ACCELERATION_THRESHOLD){
					if (rightPower < lastRightInput) {
						rightPower = lastRightInput - ACCELERATION_THRESHOLD;
					} else if (rightPower > lastRightInput) {
						rightPower = lastRightInput + ACCELERATION_THRESHOLD;
					}
				}
			}
		}
		/*
		 * if the difference of the 2 power values are within the threshold, sets both to their average
		 */
		if (Math.abs(leftPower - rightPower) < EQUALIZATION_THRESHOLD) {
			double averagePower = (leftPower + rightPower)/2;
			leftPower = averagePower;
			rightPower = averagePower;
		}
		/*
		 * if both drives are moting in the same direction, sets them closer to each other.
		 */
		/*if(leftJoystick < 0 == rightJoystick < 0) {
			leftPower -= (leftJoystick - rightJoystick)*EQUALIZATION_FACTOR;
			rightPower -= (rightJoystick - leftJoystick)*EQUALIZATION_FACTOR;
		}*/
		// Remember the post-limiting powers for the next tick's comparison.
		lastLeftInput = leftPower;
		lastRightInput = rightPower;
		// runs the FP motors
		if (Math.abs(leftPower) > FP_ACTIVATION_THRESHOLD) {
			Drivetrain.driveLeftFP(leftPower);
		} else {
			Drivetrain.driveLeftFP(0);
		}
		if (Math.abs(rightPower) > FP_ACTIVATION_THRESHOLD) {
			Drivetrain.driveRightFP(rightPower);
		} else {
			Drivetrain.driveRightFP(0);
		}
		// runs the Cims
		Drivetrain.driveCIMs(leftPower, rightPower);
		// lowers puck if drivetrains are in different directions or button is pressed
		if (driverLeft.getButton(Joystick.ButtonType.kTop)
				|| Math.abs(leftPower - rightPower) > 0.3) {
			Drivetrain.lowerPuck();
		} else {
			Drivetrain.raisePuck();
		}
		// lowers arm if operator signals
		if (operator.getRawButton(3)) {
			RampArm.lower();
		} else
			RampArm.raise();
	}
}
package edu.wpi.first.wpilibj.templates;

import edu.wpi.first.wpilibj.*;

//Driver joystick
class Driver {
    //Buttons
    static final int TRANS_TOGGLE_LEFT = 7;
    static final int TRANS_TOGGLE_RIGHT = 8;
    static final int ARCADE_TOGGLE = 1;
    //Axes
    static final int X_AXIS_LEFT = 1;
    static final int Y_AXIS_LEFT = 2;
    static final int X_AXIS_RIGHT = 3;
    static final int Y_AXIS_RIGHT = 4;
}

//Operator joystick
class Operator {
    //Buttons
    // NOTE(review): several button numbers are shared (6 is ELEVATOR_STATE_FEED,
    // ELBOW_UP and MINIBOT_RELEASE_TWO; 4 is ELEVATOR_STATE_GROUND and ELBOW_DOWN;
    // 7/9/10/11 double as light buttons behind LIGHT_SELECTION). Confirm the
    // overloading is intentional.
    static final int ELEVATOR_STATE_GROUND = 4;
    static final int ELEVATOR_STATE_ONE = 11;
    static final int ELEVATOR_STATE_TWO = 12;
    static final int ELEVATOR_STATE_THREE = 9;
    static final int ELEVATOR_STATE_FOUR = 10;
    static final int ELEVATOR_STATE_FIVE = 7;
    static final int ELEVATOR_STATE_SIX = 8;
    static final int ELEVATOR_STATE_FEED = 6;
    static final int ELEVATOR_MANUAL_TOGGLE = 5;
    static final int GRIPPER_TOGGLE = 3;
    static final int ELBOW_UP = 6;
    static final int ELBOW_DOWN = 4;
    static final int MINIBOT_RELEASE_ONE = 5;
    static final int MINIBOT_RELEASE_TWO = 6;
    static final int RELEASE_TUBE = 1;
    static final int LIGHT_SELECTION = 2;
    static final int LIGHT_RED = 7;
    static final int LIGHT_WHITE = 9;
    static final int LIGHT_BLUE = 11;
    static final int LIGHT_OFF = 10;
}

//Enumeration of setpoints for different heights of the elevator (encoder counts)
class ElevatorSetpoint {
    static final double ground = 0;
    static final double posOne = 250;
    static final double posTwo = 620;
    static final double posThree = 2000;
    static final double posFour = 2360;
    static final double posFive = 3750;
    static final double posSix = 3900;
    static final double feed = 1865;
}

//Discrete positions of the arm elbow (Horizontal < Middle < Vertical)
class ElbowState {
    static final int Horizontal = 0;
    static final int Middle = 1;
    static final int Vertical = 2;
}

//Enumeration of autonomous modes
class AutonomousState {
    static final int Driving = 0;
    static final int Turning = 1;
    static final int Reset = 2;
    static final int Release = 3;
    static final int Done = 4;
    static final int Sleep = 5;
}

//Signal-light colors driven through the two light relays (see flashLED)
class Lights {
    static final int Red = 0;
    static final int White = 1;
    static final int Blue = 2;
    static final int Off = 3;
}

/**
 * 2011-season iterative robot: two-speed tank/arcade drivetrain with
 * encoder PIDs, a PID-free bang-bang elevator, pneumatic elbow/gripper,
 * minibot deployment and line-tracking autonomous.
 */
public class RobotTemplate extends IterativeRobot {
    //Practise robot or competition robot
    static final boolean PRACTISE_ROBOT = true;
    //Encoder rate at max speed in slow gear
    static final double SLOW_MAX_ENCODER_RATE = 750.0;
    //Encoder rate at max speed in fast gear
    static final double FAST_MAX_ENCODER_RATE = 1700.0;
    //Speed to set the elevator motor to
    static final double ELEVATOR_SPEED_UP = 1.0;
    static final double ELEVATOR_SPEED_DOWN = -0.7;
    //Max drive motor speed
    static final double MAX_DRIVE_SPEED = 1.0;
    //Encoder counts per metre travelled
    static final double COUNTS_PER_METRE = 500;
    //Number of elevator encoder counts
    static final int MAX_ELEVATOR_COUNTS = 2400;
    //Number of seconds to wait in teleoperated mode before the minibot is allowed to be deployed
    static final double MINIBOT_RELEASE_TIME = 110.0;
    //Number of seconds after the minibot drops before we send it out horizontally
    static final double MINIBOT_HORIZONTAL_DELAY = 0.5;
    //Tolerance for the gyro pid
    static final double GYRO_TOLERANCE = 5.0;
    //Delay (seconds) between lowering the elbow and releasing the tube in autonomous
    //(original comment was truncated: "Delay between")
    static final double AUTONOMOUS_RELEASE_DELAY = 0.5;
    //Print delay
    static final double PRINT_DELAY = 0.5;
    static final int AUTONOMOUS_DRIVE_COUNTS = 2600;
    // NOTE(review): ULTRASONIC_VOLTS_PER_INCH is not referenced anywhere in this
    // file — possibly dead, or used by code outside this view.
    static final double ULTRASONIC_VOLTS_PER_INCH = 0.0098;
    //Driver station
    DriverStation ds = DriverStation.getInstance();
    //Joysticks
    Joystick stickDriver = new Joystick(1);
    Joystick stickOperator = new Joystick(2);
    //Compressor, switch is DI 10, spike is relay 1
    Compressor compressor = new Compressor(10, 1);
    Pneumatic lightsOne;
    Pneumatic lightsTwo;
    //Solenoids for main robot or practise robot
    Pneumatic transShift;
    Pneumatic elbowTop;
    Pneumatic elbowBottom;
    Pneumatic gripper;
    Pneumatic minibotHorizontal;
    //Vertical is always a double solenoid
    Pneumatic minibotVertical;
    //Gyro
    Gyro gyro = new Gyro(1);
    PIDOutputStorage gyroOutput = new PIDOutputStorage();
    //Jaguars
    Jaguar jagLeft = new Jaguar(1);
    Jaguar jagRight = new Jaguar(2);
    //Stores output from robotDrive
    OutputStorage storageLeft = new OutputStorage();
    OutputStorage storageRight = new OutputStorage();
    //DI 3 doesn't work
    //Victors
    Victor vicElevator = new Victor(3);
    //Encoders
    PIDEncoder encLeft;
    Encoder encNull;
    PIDEncoder encElevator;
    PIDEncoder encRight;
    //Line-tracking sensors (used by the trackLine autonomous path)
    DigitalInput rightSensor = new DigitalInput(11);
    DigitalInput middleSensor = new DigitalInput(12);
    DigitalInput leftSensor = new DigitalInput(13);
    //Provides drive functions (arcade and tank drive)
    RobotDrive robotDrive = new RobotDrive(storageLeft, storageRight);
    //PIDs
    PIDController pidLeft;
    PIDController pidRight;
    PIDController pidGyro;
    //Current transmission gear: false = slow, true = fast (see teleopContinuous)
    boolean transState;
    //Toggle for manual or automated elevator control
    Toggle manualElevatorToggle = new Toggle(false);
    //Toggle for the gripper
    Toggle gripperToggle = new Toggle(false);
    //Toggle for arcade/tank drive
    Toggle arcadeToggle = new Toggle(true);
    //State of elbow
    int elbowState;
    ButtonPress elbowUp = new ButtonPress();
    ButtonPress elbowDown = new ButtonPress();
    //The elevator setpoint, determined by which button on the operator joystick is pressed
    double elevatorSetpoint = ElevatorSetpoint.ground;

    //Runs when the robot is turned on: wires encoders/pneumatics for the
    //selected chassis, builds the PID controllers and starts the compressor.
    public void robotInit() {
        transState = false;
        //Encoder wiring differs between the competition and practise chassis
        if(!PRACTISE_ROBOT) {
            encLeft = new PIDEncoder(true, 3, 4, true);
            encNull = new Encoder(7, 8);
            encElevator = new PIDEncoder(false, 5, 6, true);
            encRight = new PIDEncoder(true, 1, 2, true);
        } else {
            encLeft = new PIDEncoder(true, 5, 6, true);
            encNull = new Encoder(3, 4);
            encElevator = new PIDEncoder(false, 7, 8);
            encRight = new PIDEncoder(true, 1, 2, true);
        }
        //Pure-I velocity loops on the wheels; small PI loop on the gyro
        pidLeft = new PIDController(0.0, 0.0005, 0.0, encLeft, jagLeft, 0.005);
        pidRight = new PIDController(0.0, 0.0005, 0.0, encRight, jagRight, 0.005);
        pidGyro = new PIDController(0.0005, 0.0005, 0.0, gyro, gyroOutput, 0.005);
        //Initialize our pneumatics if we are using the practise robot or the real robot
        if(!PRACTISE_ROBOT) {
            transShift = new Pneumatic(new Solenoid(8));
            elbowTop = new Pneumatic(new Solenoid(3));
            elbowBottom = new Pneumatic(new Solenoid(2));
            gripper = new Pneumatic(new Solenoid(1));
            minibotHorizontal = new Pneumatic(new Solenoid(4));
            minibotVertical = new Pneumatic(new DoubleSolenoid(6, 7));
            lightsOne = new Pneumatic(new Relay(2));
            lightsTwo = new Pneumatic(new Relay(3));
        } else {
            transShift = new Pneumatic(new Relay(5));
            elbowTop = new Pneumatic(new DoubleSolenoid(3, 4));
            elbowBottom = new Pneumatic(new DoubleSolenoid(5, 6));
            gripper = new Pneumatic(new DoubleSolenoid(1, 2));
            minibotHorizontal = new Pneumatic(new Relay(7));
            minibotVertical = new Pneumatic(new DoubleSolenoid(7, 8));
            lightsOne = new Pneumatic(new Relay(6));
            lightsTwo = new Pneumatic(new Relay(8));
        }
        //Start our encoders
        encRight.start();
        encLeft.start();
        encElevator.start();
        //Start our elevator encoder at 0
        encElevator.reset();
        //Input/output range for left encoder/motors
        pidLeft.setInputRange(-SLOW_MAX_ENCODER_RATE, SLOW_MAX_ENCODER_RATE);
        pidLeft.setOutputRange(-MAX_DRIVE_SPEED, MAX_DRIVE_SPEED);
        //Input/output range for right encoder/motors
        pidRight.setInputRange(-SLOW_MAX_ENCODER_RATE, SLOW_MAX_ENCODER_RATE);
        pidRight.setOutputRange(-MAX_DRIVE_SPEED, MAX_DRIVE_SPEED);
        //Input/output range for the gyro PID
        pidGyro.setInputRange(-360.0, 360.0);
        pidGyro.setOutputRange(-0.5, 0.5);
        //Start the compressor
        compressor.start();
    }

    //Runs at the beginning of disabled period
    public void disabledInit() {
        //Disable PIDs
        pidLeft.disable();
        pidRight.disable();
        pidGyro.disable();
    }

    //Runs periodically during disabled period
    public void disabledPeriodic() {
        //Call our print function with the current mode
        print("Disabled");
    }

    //List of autonomous steps
    Step stepList[] = null;
    //Iterates through each step
    int stepIndex;
    //Autonomous configuration flags, read from driver-station inputs in autonomousInit
    boolean doNothing;
    boolean trackLine;
    boolean steal = true;
    boolean heightOne;
    boolean heightTwo;
    boolean heightThree;
    boolean staggeredPeg;
    boolean releaseTube;
    double startPosition;

    //Runs at the beginning of autonomous period: reads DS switches, resets
    //sensors/PIDs and builds the step list plus the elevator setpoint.
    public void autonomousInit() {
        //Digital/analog inputs
        doNothing = ds.getDigitalIn(1);
        trackLine = ds.getDigitalIn(2);
        heightOne = ds.getDigitalIn(3);
        heightTwo = ds.getDigitalIn(4);
        heightThree = ds.getDigitalIn(5);
        staggeredPeg = ds.getDigitalIn(6);
        releaseTube = ds.getDigitalIn(7);
        startPosition = ds.getAnalogIn(1);
        //Minibot defaults to up
        minibotHorizontal.set(false);
        minibotVertical.set(false);
        //Default to slow driving mode
        transShift.set(false);
        //Reset gyro and enable PID on gyro
        pidGyro.enable();
        gyro.reset();
        //Enable PID on wheels
        pidLeft.enable();
        pidRight.enable();
        //Reset encoders
        encLeft.reset();
        encRight.reset();
        //Current step
        stepIndex = 0;
        //Reset the counter for how many times the gyro has reached its setpoint
        gyroCounter = 0;
        setElbow(ElbowState.Vertical);
        if(doNothing) {
            stepList = new Step[] {
                new Step(AutonomousState.Done),
            };
        } else {
            stepList = new Step[] {
                new Step(AutonomousState.Driving, AUTONOMOUS_DRIVE_COUNTS),
                new Step(AutonomousState.Release),
                //new Step(AutonomousState.Driving, steal ? COUNTS_PER_METRE - AUTONOMOUS_DRIVE_COUNTS : 0),
                //new Step(AutonomousState.Turning, steal ? 90 : 0),
                //new Step(AutonomousState.Turning, steal ? -90 : 0),
                new Step(AutonomousState.Done),
            };
        }
        //Determine the setpoint of the elevator
        elevatorSetpoint = (heightOne && !staggeredPeg) ? ElevatorSetpoint.posOne : elevatorSetpoint;
        elevatorSetpoint = (heightOne && staggeredPeg) ? ElevatorSetpoint.posTwo : elevatorSetpoint;
        elevatorSetpoint = (heightTwo && !staggeredPeg) ? ElevatorSetpoint.posThree : elevatorSetpoint;
        elevatorSetpoint = (heightTwo && staggeredPeg) ? ElevatorSetpoint.posFour : elevatorSetpoint;
        elevatorSetpoint = (heightThree && !staggeredPeg) ? ElevatorSetpoint.posFive : elevatorSetpoint;
        elevatorSetpoint = (heightThree && staggeredPeg) ? ElevatorSetpoint.posSix : elevatorSetpoint;
    }

    //Runs periodically during autonomous period
    public void autonomousPeriodic() {
        //Call our print function with the current mode
        print("Autonomous");
    }

    static final double DEFAULT_STEERING_GAIN = 0.65;

    //Runs continuously during autonomous period: either follows the line with
    //the three light sensors (trackLine) or executes the stepList state machine.
    public void autonomousContinuous() {
        if(trackLine && !doNothing) {
            int binaryValue; // a single binary value of the three line tracking
            // sensors
            int previousValue = 0; // the binary value from the previous loop
            double steeringGain; // the amount of steering correction to apply
            // the power profiles for the straight and forked robot path. They are
            // different to let the robot drive more slowly as the robot approaches
            // the fork on the forked line case.
            double forkProfile[] = {0.70, 0.70, 0.55, 0.60, 0.60, 0.50, 0.40, 0.00};
            double straightProfile[] = {0.7, 0.7, 0.6, 0.6, 0.35, 0.35, 0.35, 0.0};
            double powerProfile[]; // the selected power profile
            // set the straightLine and left-right variables depending on chosen path
            boolean straightLine = ds.getDigitalIn(1);
            powerProfile = (straightLine) ? straightProfile : forkProfile;
            double stopTime = (straightLine) ? 2.0 : 4.0; // when the robot should look for end
            boolean goLeft = !ds.getDigitalIn(2) && !straightLine;
            System.out.println("StraightLine: " + straightLine);
            System.out.println("GoingLeft: " + goLeft);
            boolean atCross = false; // if robot has arrived at end
            // time the path over the line
            Timer timer = new Timer();
            timer.start();
            timer.reset();
            // NOTE(review): oldTimeInSeconds is assigned each loop but never read.
            int oldTimeInSeconds = -1;
            double time;
            double speed, turn;
            // loop until robot reaches "T" at end or 8 seconds has past
            while ((time = timer.get()) < 8.0 && !atCross) {
                int timeInSeconds = (int) time;
                // read the sensors
                int leftValue = leftSensor.get() ? 1 : 0;
                int middleValue = middleSensor.get() ? 1 : 0;
                int rightValue = rightSensor.get() ? 1 : 0;
                // compute the single value from the 3 sensors. Notice that the bits
                // for the outside sensors are flipped depending on left or right
                // fork. Also the sign of the steering direction is different for left/right.
                if (goLeft) {
                    binaryValue = leftValue * 4 + middleValue * 2 + rightValue;
                    steeringGain = -DEFAULT_STEERING_GAIN;
                } else {
                    binaryValue = rightValue * 4 + middleValue * 2 + leftValue;
                    steeringGain = DEFAULT_STEERING_GAIN;
                }
                // get the default speed and turn rate at this time
                speed = powerProfile[timeInSeconds];
                turn = 0;
                // different cases for different line tracking sensor readings
                switch (binaryValue) {
                    case 1: // on line edge
                        turn = 0;
                        break;
                    case 7: // all sensors on (maybe at cross)
                        if (time > stopTime) {
                            atCross = true;
                            speed = 0;
                        }
                        break;
                    case 0: // all sensors off
                        if (previousValue == 0 || previousValue == 1) {
                            turn = steeringGain;
                        } else {
                            turn = -steeringGain;
                        }
                        break;
                    default: // all other cases
                        turn = -steeringGain;
                }
                // print current status for debugging
                if (binaryValue != previousValue) {
                    System.out.println("Time: " + time + " Sensor: " + binaryValue + " speed: " + speed + " turn: " + turn + " atCross: " + atCross);
                }
                // set the robot speed and direction
                robotDrive.arcadeDrive(speed, turn);
                pidLeft.setSetpoint(storageLeft.get() * SLOW_MAX_ENCODER_RATE);
                pidRight.setSetpoint(storageRight.get() * SLOW_MAX_ENCODER_RATE);
                if (binaryValue != 0) {
                    previousValue = binaryValue;
                }
                oldTimeInSeconds = timeInSeconds;
                Timer.delay(0.01);
            }
            // Done with loop - stop the robot. Robot ought to be at the end of the line
            pidLeft.setSetpoint(0.0);
            pidRight.setSetpoint(0.0);
        } else {
            //Our current step in our list of steps
            Step currentStep = stepList[stepIndex];
            //The last step we did
            int lastStepIndex = stepIndex;
            //If we have a step to do
            if(currentStep != null) {
                //Switch the type of step
                switch(currentStep.type) {
                    //If we want to drive forward
                    case AutonomousState.Driving:
                        int direction = currentStep.get() > 0 ? 1 : -1;
                        //If we have reached our value for this step on the left or right side
                        //(left encoder is negated — it counts opposite to the right one)
                        final boolean leftDone = -encLeft.encoder.get() >= Math.abs(currentStep.get());
                        final boolean rightDone = encRight.encoder.get() >= Math.abs(currentStep.get());
                        //Drive each side until we reach the value for each side
                        robotDrive.arcadeDrive(0.65, gyroPID(true, 0.0));
                        if(!leftDone)
                            pidLeft.setSetpoint(direction * -storageLeft.get() * SLOW_MAX_ENCODER_RATE);
                        else
                            pidLeft.disable();
                        if(!rightDone)
                            pidRight.setSetpoint(direction * -storageRight.get() * SLOW_MAX_ENCODER_RATE);
                        else
                            pidRight.disable();
                        //If the value is reached
                        if(elevatorPID() && leftDone && rightDone)
                            ++stepIndex;
                        break;
                    //If we want to turn
                    case AutonomousState.Turning:
                        //Disable PIDs for smoother turning
                        if(pidLeft.isEnable() || pidRight.isEnable()) {
                            pidLeft.disable();
                            pidRight.disable();
                        }
                        // NOTE(review): this branch is dead (if(false)) — the PID-based
                        // turn was disabled in favor of gyroPID(); kept as written.
                        if(false) {
                            //Set the setpoint for the gyro PID to the step's setpoint
                            pidGyro.setSetpoint(currentStep.get());
                            //Drive the motors with the output from the gyro PID
                            jagLeft.set(-gyroOutput.get());
                            jagRight.set(-gyroOutput.get());
                            //Difference between our position and our setpoint
                            final double delta = currentStep.get() - gyro.pidGet();
                            //If the gyro is below or above the target angle depending on the direction we are turning
                            if(Math.abs(delta) < GYRO_TOLERANCE)
                                ++gyroCounter;
                            if(gyroCounter >= 10) {
                                ++stepIndex;
                                pidLeft.enable();
                                pidRight.enable();
                            }
                        } else {
                            gyroPID(false, currentStep.get());
                        }
                        break;
                    case AutonomousState.Reset:
                        setElbow(ElbowState.Vertical);
                        elevatorSetpoint = ElevatorSetpoint.ground;
                        if(elevatorPID())
                            ++stepIndex;
                        break;
                    //To release the tube
                    case AutonomousState.Release:
                        if(releaseTube) {
                            setElbow(ElbowState.Middle);
                            Timer.delay(AUTONOMOUS_RELEASE_DELAY);
                            releaseTube();
                            // NOTE(review): Timer.delay takes seconds, so this blocks for
                            // 1000 s — far longer than autonomous lasts. Probably meant
                            // 1.0; confirm before changing.
                            Timer.delay(1000);
                            elevatorSetpoint = steal ? ElevatorSetpoint.ground : elevatorSetpoint;
                        }
                        ++stepIndex;
                        break;
                    //If we are done our autonomous mode
                    case AutonomousState.Done:
                        pidLeft.disable();
                        pidRight.disable();
                        break;
                    //Sleep state
                    case AutonomousState.Sleep:
                        double time = currentStep.get();
                        pidLeft.disable();
                        pidRight.disable();
                        while(time > 0) {
                            print("Autonomous");
                            Timer.delay(1);
                            --time;
                            Watchdog.getInstance().feed();
                        }
                        // NOTE(review): pidLeft is enabled twice here; the second call
                        // was likely meant to be pidRight.enable(). Kept as written.
                        pidLeft.enable();
                        pidLeft.enable();
                        ++stepIndex;
                        break;
                    default:
                        ++stepIndex;
                        break;
                }
            }
            //If we want to go to the next step
            if(lastStepIndex != stepIndex) {
                //Reset everything
                encLeft.reset();
                encRight.reset();
                gyro.reset();
                vicElevator.set(0.0);
                //Stop
                pidLeft.enable();
                pidRight.enable();
                pidLeft.setSetpoint(0.0);
                pidRight.setSetpoint(0.0);
                //Reset gyro counter to 0
                gyroCounter = 0;
                System.out.println("Step: " + stepIndex);
            }
        }
    }

    //Start time for teleoperated mode
    double teleopStartTime;
    //Start time for when the minibot release is triggered
    double minibotReleaseTime;
    //Releasing minibot
    boolean releaseMinibot;
    int lightState = Lights.Red;

    //Runs at the beginning of teleoperated period
    public void teleopInit() {
        //Initialize variables
        teleopStartTime = Timer.getFPGATimestamp();
        minibotReleaseTime = 0.0;
        releaseMinibot = false;
        //Minibot defaults to up
        minibotHorizontal.set(false);
        minibotVertical.set(false);
    }

    //Runs periodically during teleoperated period
    public void teleopPeriodic() {
        //Call our print function with the current mode
        print("Teleoperated");
    }

    //Runs continuously during teleoperated period: lights, elevator,
    //transmission auto-shift, gripper/elbow, drive and minibot release.
    public void teleopContinuous() {
        //Don't allow the gyro to be more or less than 360 degrees
        if(gyro.pidGet() < -360 || gyro.pidGet() > 360)
            gyro.reset();
        //While LIGHT_SELECTION is held, buttons select the signal-light color
        boolean selectLight = stickOperator.getRawButton(Operator.LIGHT_SELECTION);
        if(selectLight && stickOperator.getRawButton(Operator.LIGHT_RED)) {
            lightState = Lights.Red;
        }
        if(selectLight && stickOperator.getRawButton(Operator.LIGHT_WHITE)) {
            lightState = Lights.White;
        }
        if(selectLight && stickOperator.getRawButton(Operator.LIGHT_BLUE)) {
            lightState = Lights.Blue;
        }
        if(selectLight && stickOperator.getRawButton(Operator.LIGHT_OFF)) {
            lightState = Lights.Off;
        }
        flashLED();
        if(!selectLight) {
            //The elevator setpoint based on the corresponding button
            elevatorSetpoint = stickOperator.getRawButton(Operator.ELEVATOR_STATE_GROUND) ? ElevatorSetpoint.ground : elevatorSetpoint;
            elevatorSetpoint = stickOperator.getRawButton(Operator.ELEVATOR_STATE_ONE) ? ElevatorSetpoint.posOne : elevatorSetpoint;
            elevatorSetpoint = stickOperator.getRawButton(Operator.ELEVATOR_STATE_TWO) ? ElevatorSetpoint.posTwo : elevatorSetpoint;
            elevatorSetpoint = stickOperator.getRawButton(Operator.ELEVATOR_STATE_THREE) ? ElevatorSetpoint.posThree : elevatorSetpoint;
            elevatorSetpoint = stickOperator.getRawButton(Operator.ELEVATOR_STATE_FOUR) ? ElevatorSetpoint.posFour : elevatorSetpoint;
            elevatorSetpoint = stickOperator.getRawButton(Operator.ELEVATOR_STATE_FIVE) ? ElevatorSetpoint.posFive : elevatorSetpoint;
            elevatorSetpoint = stickOperator.getRawButton(Operator.ELEVATOR_STATE_SIX) ? ElevatorSetpoint.posSix : elevatorSetpoint;
            elevatorSetpoint = stickOperator.getRawButton(Operator.ELEVATOR_STATE_FEED) ? ElevatorSetpoint.feed : elevatorSetpoint;
        }
        //Feed the toggle on the manual/automated elevator control
        manualElevatorToggle.feed(stickOperator.getRawButton(Operator.ELEVATOR_MANUAL_TOGGLE));
        //Manual or automated elevator control
        if(manualElevatorToggle.get()) {
            vicElevator.set(-stickOperator.getAxis(Joystick.AxisType.kY));
        } else {
            elevatorPID();
        }
        //Minus because the left encoder is negative
        double rate = Math.abs((encRight.pidGet() - encLeft.pidGet()) / 2);
        //Auto-shift: up when near slow-gear top speed at high stick, down when slow
        if(!transState)
            transState = rate >= 0.9 * SLOW_MAX_ENCODER_RATE && Math.abs(stickDriver.getRawAxis(Driver.Y_AXIS_LEFT)) >= 0.9 ? true : transState;
        else if(transState)
            transState = rate <= 0.6 * FAST_MAX_ENCODER_RATE && Math.abs(stickDriver.getRawAxis(Driver.Y_AXIS_LEFT)) <= 0.6 ? false : transState;
        //Manual override buttons win over the auto-shift decision
        transState = stickDriver.getRawButton(Driver.TRANS_TOGGLE_LEFT) ? false : transState;
        transState = stickDriver.getRawButton(Driver.TRANS_TOGGLE_RIGHT) ? true : transState;
        //TODO: the following line was a hack to make the test robot work. remove it
        //transState = false;
        //Set the transmission shifter to open or closed based on the state of the toggle
        transShift.set(transState);
        //Determine the input range to use (max encoder rate) to use depending on the transmission state we are in
        double maxEncoderRate = transState ? FAST_MAX_ENCODER_RATE : SLOW_MAX_ENCODER_RATE;
        pidLeft.setInputRange(-maxEncoderRate, maxEncoderRate);
        pidRight.setInputRange(-maxEncoderRate, maxEncoderRate);
        //Feed the toggle on the gripper button
        gripperToggle.feed(stickOperator.getRawButton(Operator.GRIPPER_TOGGLE));
        //Set the gripper to open or closed based on the state of the toggle
        if(elbowState != ElbowState.Vertical)
            gripper.set(gripperToggle.get());
        //Feed the buttons
        elbowUp.feed(stickOperator.getRawButton(Operator.ELBOW_UP));
        elbowDown.feed(stickOperator.getRawButton(Operator.ELBOW_DOWN));
        //Horizontal < Middle < Vertical
        /*if(elbowUp.get())
            elbowState += elbowState < ElbowState.Vertical ? 1 : 0;
        if(elbowDown.get())
            elbowState -= elbowState > ElbowState.Horizontal ? 1 : 0;*/
        //Elbow position comes from the throttle axis, split into three bands
        double elbowInput = -stickOperator.getAxis(Joystick.AxisType.kThrottle);
        if(elbowInput < -0.5)
            elbowState = ElbowState.Horizontal;
        else if(elbowInput > 0.5)
            elbowState = ElbowState.Vertical;
        else
            elbowState = ElbowState.Middle;
        setElbow(elbowState);
        //Feed the toggle on the arcade/tank drive button
        arcadeToggle.feed(stickDriver.getRawButton(Driver.ARCADE_TOGGLE));
        //PID drive is currently switched off; motors are driven open-loop below
        final boolean doPID = false;
        //Drive arcade or tank based on the state of the toggle
        if(arcadeToggle.get()) {
            //If PID is disabled
            if(!pidLeft.isEnable() || !pidRight.isEnable()) {
                //Enable PID
                pidLeft.enable();
                pidRight.enable();
            }
            // NOTE(review): '||' binds looser than '&&', so this reads
            // pidLeft.isEnable() || (pidRight.isEnable() && !doPID) — with doPID
            // false it disables PID right after enabling it above. Confirm intent.
            if(pidLeft.isEnable() || pidRight.isEnable() && !doPID) {
                pidLeft.disable();
                pidRight.disable();
            }
            double driveAxis = stickDriver.getRawAxis(Driver.Y_AXIS_LEFT);
            driveAxis = Math.abs(driveAxis) < 0.2 ? 0.0 : driveAxis;
            double turnAxis = stickDriver.getRawAxis(Driver.X_AXIS_RIGHT);
            turnAxis = Math.abs(turnAxis) < 0.2 ? 0.0 : turnAxis;
            //Let the robotdrive class calculate arcade drive for us
            robotDrive.arcadeDrive(driveAxis, turnAxis);
            if(doPID) {
                pidLeft.setSetpoint(storageLeft.get() * maxEncoderRate);
                pidRight.setSetpoint(storageRight.get() * maxEncoderRate);
            } else {
                jagLeft.set(storageLeft.get());
                jagRight.set(storageRight.get());
            }
        } else if(!arcadeToggle.get()) {
            //If PID is disabled
            if(!pidLeft.isEnable() || !pidRight.isEnable() && doPID) {
                //Enable PID
                pidLeft.enable();
                pidRight.enable();
            }
            if(pidLeft.isEnable() || pidRight.isEnable() && !doPID) {
                pidLeft.disable();
                pidRight.disable();
            }
            //Left axis
            double leftAxis = stickDriver.getRawAxis(Driver.Y_AXIS_LEFT);
            //Any value less than 0.2 is set to 0.0 to create a dead zone
            leftAxis = Math.abs(leftAxis) < 0.2 ? 0.0 : leftAxis;
            //Right axis
            double rightAxis = stickDriver.getRawAxis(Driver.Y_AXIS_RIGHT);
            //Any value less than 0.2 is set to 0.0 to create a dead zone
            rightAxis = Math.abs(rightAxis) < 0.2 ? 0.0 : rightAxis;
            if(doPID) {
                //Set the setpoint as a percentage of the maximum encoder rate
                pidLeft.setSetpoint(leftAxis * maxEncoderRate);
                pidRight.setSetpoint(-rightAxis * maxEncoderRate);
            } else {
                jagLeft.set(leftAxis);
                jagRight.set(-rightAxis);
            }
        }
        //If there are 10 seconds left
        if(Timer.getFPGATimestamp() - teleopStartTime >= MINIBOT_RELEASE_TIME) {
            //If we triggered the release, set the release to true, otherwise just leave it
            //Creates a one-way toggle
            releaseMinibot = stickOperator.getRawButton(Operator.MINIBOT_RELEASE_ONE) && stickOperator.getRawButton(Operator.MINIBOT_RELEASE_TWO) ? true : releaseMinibot;
            //If we want to release
            if(releaseMinibot) {
                //Set the vertical relay to released
                minibotVertical.set(true);
                //If the release time is 0 (we haven't set the release time yet) then set the release time
                //Allows us to set the release time only once
                minibotReleaseTime = minibotReleaseTime == 0.0 ? Timer.getFPGATimestamp() : minibotReleaseTime;
                //If it's been at least 2 seconds since the release was triggered
                if(Timer.getFPGATimestamp() - minibotReleaseTime >= MINIBOT_HORIZONTAL_DELAY) {
                    //Set the horizontal relay to released
                    minibotHorizontal.set(true);
                }
            }
        }
    }

    //Bang-bang elevator control toward elevatorSetpoint.
    //Returns whether or not the setpoint has been reached
    public boolean elevatorPID() {
        //Difference between setpoint and our position
        final double error = elevatorSetpoint - encElevator.pidGet();
        //We can be off by 5%
        final double toleranceWhileGoingUp = MAX_ELEVATOR_COUNTS * 0.05;
        final double toleranceWhileGoingDown = -MAX_ELEVATOR_COUNTS * 0.05;
        //Go up when below setpoint, down when above setpoint
        if(error > 0 && error > toleranceWhileGoingUp)
            vicElevator.set(ELEVATOR_SPEED_UP);
        else if(error < 0 && error < toleranceWhileGoingDown)
            vicElevator.set(ELEVATOR_SPEED_DOWN);
        else {
            vicElevator.set(0.0);
            return true;
        }
        return false;
    }

    //Moves the elbow pneumatics to the given ElbowState and closes the
    //gripper whenever the elbow goes vertical.
    public void setElbow(int state) {
        //Update the elbow state
        elbowState = state;
        //For the elbow pneumatics, closed = true open = false
        //The top elbow is only ever closed in the vertical state
        elbowTop.set(elbowState != ElbowState.Vertical);
        //The bottom elbow is only ever open in the horizontal state
        elbowBottom.set(elbowState == ElbowState.Horizontal);
        //If we are vertical then close the gripper
        if(elbowState == ElbowState.Vertical) {
            gripper.set(false);
            gripperToggle.set(false);
        }
    }

    //Number of times our setpoint has been reached
    int gyroCounter;

    //Hand-rolled gyro correction. returnSpeed=true: returns a small turn
    //correction for straight driving; returnSpeed=false: turns the robot on
    //the spot toward target and advances stepIndex when settled.
    public double gyroPID(boolean returnSpeed, double target) {
        //Use our own calculations to get to the setpoint of the gyro
        final double delta = target - gyro.getAngle();
        //For straight driving in autonomous mode
        if(returnSpeed) {
            if(Math.abs(delta) < GYRO_TOLERANCE)
                ++gyroCounter;
            if(gyroCounter >= 100) {
                gyroCounter = 0;
                return 0.0;
            }
            //The speed is incorporated into straight driving so it has to be low
            final double speed = 0.1;
            return delta > 0 ? -speed : speed;
        }
        //For turning on the spot
        else {
            if(Math.abs(delta) < GYRO_TOLERANCE)
                ++gyroCounter;
            if(gyroCounter >= 100)
                ++stepIndex;
            //We are turning on the spot so the turning speed is high
            final double speed = 0.25;
            //Both jaguars same sign = spin in place (left motor is mounted mirrored)
            jagLeft.set(delta >= 0 ? -speed : speed);
            jagRight.set(delta >= 0 ? -speed : speed);
            return 0.0;
        }
    }

    //Drops the elbow horizontal and opens the gripper to let go of the tube.
    public void releaseTube() {
        setElbow(ElbowState.Horizontal);
        gripper.set(true);
        //Toggle the gripper to be open at the beginning of teleop
        gripperToggle.set(true);
    }

    double flashTime = 0;
    static final double FLASH_TIME = 1.0;

    //Maps lightState onto the two light relays (color cycling is disabled).
    public void flashLED() {
        /*double now = Timer.getFPGATimestamp();
        if(now - flashTime > FLASH_TIME) {
            flashTime = now;
            if(lightState < Lights.Blue)
                ++lightState;
            else
                lightState = Lights.Red;
        }*/
        switch(lightState) {
            case Lights.Red:
                lightsOne.set(true);
                lightsTwo.relay.set(Relay.Value.kOff);
                break;
            case Lights.White:
                lightsOne.set(false);
                lightsTwo.relay.set(Relay.Value.kOff);
                break;
            case Lights.Blue:
                lightsOne.relay.set(Relay.Value.kOff);
                lightsTwo.set(true);
                break;
            case Lights.Off:
            default:
                lightsOne.relay.set(Relay.Value.kOff);
                lightsTwo.relay.set(Relay.Value.kOff);
                break;
        }
        if(transState)
            lightsTwo.set(false);
    }

    double lastPrintTime = 0;

    //Print function for our variables, rate-limited to one dump per PRINT_DELAY.
    public void print(String mode) {
        //Current time
        final double curPrintTime = Timer.getFPGATimestamp();
        //If it has been more than half a second
        if(curPrintTime - lastPrintTime > PRINT_DELAY) {
            //Make a bunch of newlines to clear the screen to only show the current output
            System.out.println("\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n");
            //Print statements
            System.out.println("[" + mode + "]");
            System.out.println("gripperToggle: " + gripperToggle.get());
            System.out.println("renc count: " + encRight.encoder.get() + " lenc count: " + encLeft.encoder.get() + " elevator counts: " + encElevator.pidGet());
            System.out.println("rencRate: " + encRight.pidGet() + " lencRate: " + encLeft.pidGet());
            System.out.println("rSet: " + pidRight.getSetpoint() + " lSet: " + pidLeft.getSetpoint() + " eSet: " + elevatorSetpoint);
            System.out.println("rPID: " + pidRight.get() + " lPID: " + pidLeft.get());
            System.out.println("manualElevator: " + manualElevatorToggle.get());
            System.out.println("elevAxis: " + stickOperator.getAxis(Joystick.AxisType.kY) + " leftAxis: " + stickDriver.getRawAxis(Driver.Y_AXIS_LEFT) + " rightAxis: " + stickDriver.getRawAxis(Driver.Y_AXIS_RIGHT));
            System.out.println("Gyro PIDget: " + gyro.pidGet() + " gyro output storage: " + gyroOutput.get());
            System.out.println("jagLeft: " + jagLeft.get() + " jagRight: " + jagRight.get());
            System.out.println("elbow input: " + stickOperator.getThrottle() + "elbowState: " + elbowState);
            //System.out.println("Raven gyro min: " + gyro.min + " max: " + gyro.max + " deadzone: " + gyro.deadzone + " center: " + gyro.center);
            System.out.println("rightSensor: " + rightSensor.get() + " middleSensor: " + middleSensor.get() + " leftSensor: " + leftSensor.get());
            System.out.println("light: " + lightState);
            //Update the last print time
            lastPrintTime = curPrintTime;
        }
    }
}
package biweekly.io.json;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import biweekly.parameter.Value;
import biweekly.property.Categories;
import biweekly.property.RecurrenceRule;
import biweekly.property.RequestStatus;
import biweekly.property.Summary;
import biweekly.util.ListMultimap;

/**
 * Holds the data type and value of a jCal property.
 * @author Michael Angstadt
 */
public class JCalValue {
	private final Value dataType;
	private final List<JsonValue> values;

	/**
	 * Creates a new jCal value.
	 * @param dataType the data type or null for "unknown"
	 * @param values the values
	 */
	public JCalValue(Value dataType, List<JsonValue> values) {
		this.dataType = dataType;
		this.values = Collections.unmodifiableList(values);
	}

	/**
	 * Creates a new jCal value.
	 * @param dataType the data type or null for "unknown"
	 * @param values the values
	 */
	public JCalValue(Value dataType, JsonValue... values) {
		this.dataType = dataType;
		//Arrays.asList alone is only fixed-size, not unmodifiable (set() still
		//works) — wrap it so both constructors make the same immutability
		//guarantee.
		this.values = Collections.unmodifiableList(Arrays.asList(values));
	}

	/**
	 * Creates a single-valued value.
	 * @param dataType the data type or null for "unknown"
	 * @param value the value
	 * @return the jCal value
	 */
	public static JCalValue single(Value dataType, Object value) {
		return new JCalValue(dataType, new JsonValue(value));
	}

	/**
	 * Creates a multi-valued value.
	 * @param dataType the data type or null for "unknown"
	 * @param values the values
	 * @return the jCal value
	 */
	public static JCalValue multi(Value dataType, Object... values) {
		return multi(dataType, Arrays.asList(values));
	}

	/**
	 * Creates a multi-valued value.
	 * @param dataType the data type or null for "unknown"
	 * @param values the values
	 * @return the jCal value
	 */
	public static JCalValue multi(Value dataType, List<?> values) {
		List<JsonValue> multiValues = new ArrayList<JsonValue>(values.size());
		for (Object value : values) {
			multiValues.add(new JsonValue(value));
		}
		return new JCalValue(dataType, multiValues);
	}

	/**
	 * Creates a structured value.
	 * @param dataType the data type or null for "unknown"
	 * @param values the values
	 * @return the jCal value
	 */
	public static JCalValue structured(Value dataType, Object... values) {
		return structured(dataType, Arrays.asList(values));
	}

	/**
	 * Creates a structured value.
	 * @param dataType the data type or null for "unknown"
	 * @param values the values
	 * @return the jCal value
	 */
	public static JCalValue structured(Value dataType, List<?> values) {
		//TODO this should accept a "list of lists"
		List<JsonValue> array = new ArrayList<JsonValue>(values.size());
		for (Object value : values) {
			array.add(new JsonValue(value));
		}
		//a structured value is a single JSON array wrapping the components
		return new JCalValue(dataType, new JsonValue(array));
	}

	/**
	 * Creates an object value.
	 * @param dataType the data type or null for "unknown"
	 * @param value the object
	 * @return the jCal value
	 */
	public static JCalValue object(Value dataType, ListMultimap<String, Object> value) {
		Map<String, JsonValue> object = new LinkedHashMap<String, JsonValue>();
		for (Map.Entry<String, List<Object>> entry : value) {
			String key = entry.getKey();
			List<Object> list = entry.getValue();

			JsonValue v;
			if (list.size() == 1) {
				//single mapping is stored as a scalar, not a one-element array
				v = new JsonValue(list.get(0));
			} else {
				List<JsonValue> array = new ArrayList<JsonValue>(list.size());
				for (Object element : list) {
					array.add(new JsonValue(element));
				}
				v = new JsonValue(array);
			}
			object.put(key, v);
		}
		return new JCalValue(dataType, new JsonValue(object));
	}

	/**
	 * Gets the jCard data type
	 * @return the data type or null for "unknown"
	 */
	public Value getDataType() {
		return dataType;
	}

	/**
	 * Gets all the JSON values.
	 * @return the JSON values (unmodifiable)
	 */
	public List<JsonValue> getValues() {
		return values;
	}

	/**
	 * Gets the value of a single-valued property (such as {@link Summary}).
	 * @return the value or null if not found
	 */
	public String getSingleValued() {
		if (values.isEmpty()) {
			return null;
		}

		JsonValue first = values.get(0);
		if (first.isNull()) {
			return null;
		}

		Object obj = first.getValue();
		if (obj != null) {
			return obj.toString();
		}

		//get the first element of the array
		List<JsonValue> array = first.getArray();
		if (array != null && !array.isEmpty()) {
			obj = array.get(0).getValue();
			if (obj != null) {
				return obj.toString();
			}
		}

		return null;
	}

	/**
	 * Gets the value of a structured property (such as {@link RequestStatus}).
	 * @return the values or empty list if not found
	 */
	public List<String> getStructured() {
		//TODO this should return a "list of lists"
		if (values.isEmpty()) {
			return Collections.emptyList();
		}

		JsonValue first = values.get(0);

		//["request-status", {}, "text", ["2.0", "Success"] ]
		List<JsonValue> array = first.getArray();
		if (array != null) {
			//renamed from "values" to avoid shadowing the field of the same name
			List<String> components = new ArrayList<String>(array.size());
			for (JsonValue value : array) {
				if (value.isNull()) {
					components.add(null);
					continue;
				}
				Object obj = value.getValue();
				if (obj != null) {
					components.add(obj.toString());
				}
			}
			return components;
		}

		//get the first value if it's not enclosed in an array
		//["request-status", {}, "text", "2.0"]
		Object obj = first.getValue();
		if (obj != null) {
			List<String> components = new ArrayList<String>(1);
			components.add(obj.toString());
			return components;
		}

		//["request-status", {}, "text", null]
		if (first.isNull()) {
			List<String> components = new ArrayList<String>(1);
			components.add(null);
			return components;
		}

		return Collections.emptyList();
	}

	/**
	 * Gets the value of a multi-valued property (such as {@link Categories}).
	 * @return the values or empty list if not found
	 */
	public List<String> getMultivalued() {
		if (values.isEmpty()) {
			return Collections.emptyList();
		}

		List<String> multi = new ArrayList<String>(values.size());
		for (JsonValue value : values) {
			if (value.isNull()) {
				multi.add(null);
				continue;
			}
			Object obj = value.getValue();
			if (obj != null) {
				multi.add(obj.toString());
			}
		}
		return multi;
	}

	/**
	 * Gets the value of a property whose value is an object (such as the
	 * {@link RecurrenceRule} property).
	 * @return the object or an empty map if not found
	 */
	public ListMultimap<String, String> getObject() {
		if (values.isEmpty()) {
			return new ListMultimap<String, String>(0);
		}

		Map<String, JsonValue> map = values.get(0).getObject();
		if (map == null) {
			return new ListMultimap<String, String>(0);
		}

		//renamed from "values" to avoid shadowing the field of the same name
		ListMultimap<String, String> result = new ListMultimap<String, String>();
		for (Map.Entry<String, JsonValue> entry : map.entrySet()) {
			String key = entry.getKey();
			JsonValue value = entry.getValue();

			if (value.isNull()) {
				result.put(key, null);
				continue;
			}

			Object obj = value.getValue();
			if (obj != null) {
				result.put(key, obj.toString());
				continue;
			}

			List<JsonValue> array = value.getArray();
			if (array != null) {
				for (JsonValue element : array) {
					obj = element.getValue();
					if (obj != null) {
						result.put(key, obj.toString());
					}
				}
			}
		}
		return result;
	}
}
package common.model;

import java.util.ArrayList;
import java.util.List;

/**
 * Holds one page of query results together with the paging state needed to
 * render pagination controls: current page number, page size, and the range
 * of page-number links shown around the current page.
 *
 * @param <T> type of the records held on a page
 */
public final class PaginatedList<T> {

  /** Page number used when the caller supplies none. */
  public static final int DEFAULT_CURRENT_PAGE_NUMBER = 1;

  /** Records per page used when the caller supplies none. */
  public static final int DEFAULT_PAGE_SIZE = 10;

  /** Page links shown on each side of the current page when unspecified. */
  public static final int DEFAULT_RANGE_SIZE = 2;

  /** Total number of records across all pages. */
  private long allRecordCount;

  /** Opaque search condition that produced this result set. */
  private Object searchCondition;

  /** 1-based number of the page currently being shown. */
  private int currentPageNumber;

  /** Number of records on a full page. */
  private int pageSize;

  /** Number of page links shown on each side of the current page. */
  private int pageRangeSize;

  /** Records of the current page. */
  private List<T> currentPage;

  /**
   * Creates a paginated list with the default page size and range size.
   *
   * @param currentPageNumber 1-based page number, or null for the default
   */
  public PaginatedList(Integer currentPageNumber) {
    this(currentPageNumber, null, null);
  }

  /**
   * Creates a paginated list.
   *
   * @param currentPageNumber 1-based page number, or null for the default
   * @param pageSize records per page, or null for the default
   * @param pageRangeSize page links on each side of the current page, or
   *          null for the default
   */
  public PaginatedList(Integer currentPageNumber, Integer pageSize, Integer pageRangeSize) {
    this.currentPageNumber =
        currentPageNumber != null ? currentPageNumber : DEFAULT_CURRENT_PAGE_NUMBER;
    this.pageSize = pageSize != null ? pageSize : DEFAULT_PAGE_SIZE;
    this.pageRangeSize = pageRangeSize != null ? pageRangeSize : DEFAULT_RANGE_SIZE;
  }

  /**
   * @return total number of records across all pages
   */
  public long getAllRecordCount() {
    return allRecordCount;
  }

  /**
   * @param allRecordCount total number of records across all pages
   */
  public void setAllRecordCount(long allRecordCount) {
    this.allRecordCount = allRecordCount;
  }

  /**
   * @return the search condition that produced this result set
   */
  public Object getSearchCondition() {
    return searchCondition;
  }

  /**
   * @param searchCondition the search condition that produced this result set
   */
  public void setSearchCondition(Object searchCondition) {
    this.searchCondition = searchCondition;
  }

  /**
   * @return 1-based number of the current page
   */
  public int getCurrentPageNumber() {
    return currentPageNumber;
  }

  /**
   * @param currentPageNumber 1-based number of the current page
   */
  public void setCurrentPageNumber(int currentPageNumber) {
    this.currentPageNumber = currentPageNumber;
  }

  /**
   * @return number of records on a full page
   */
  public int getPageSize() {
    return pageSize;
  }

  /**
   * @return number of page links shown on each side of the current page
   */
  public int getPageRangeSize() {
    return pageRangeSize;
  }

  /**
   * @return records of the current page
   */
  public List<T> getCurrentPage() {
    return currentPage;
  }

  /**
   * @param page records of the current page
   */
  public void setCurrentPage(List<T> page) {
    this.currentPage = page;
  }

  /**
   * @return total number of pages, rounded up from the record count
   */
  public int getAllPageCount() {
    return (int) Math.ceil(((double) allRecordCount) / pageSize);
  }

  /**
   * @return true if a page exists before the current one, false otherwise
   */
  public boolean isExistPrePage() {
    return getPrePageNumber() > 0;
  }

  /**
   * @return true if a page exists after the current one, false otherwise
   */
  public boolean isExistNextPage() {
    return getNextPageNumber() < getAllPageCount() + 1;
  }

  /**
   * @return number of the previous page (may be 0 when on the first page)
   */
  public int getPrePageNumber() {
    return currentPageNumber - 1;
  }

  /**
   * @return number of the next page (may exceed the last page number)
   */
  public int getNextPageNumber() {
    return currentPageNumber + 1;
  }

  /**
   * @return 1-based index of the first record slot on the current page
   */
  public int getCurrentStartRecordNumber() {
    return (currentPageNumber - 1) * pageSize + 1;
  }

  /**
   * @return 1-based index of the last record slot on the current page
   */
  public int getCurrentEndRecordNumber() {
    return currentPageNumber * pageSize;
  }

  /**
   * Builds the window of page numbers to display around the current page.
   * The window holds at most {@code pageRangeSize * 2 + 1} numbers and is
   * clamped to the valid page range [1, getAllPageCount()].
   *
   * @return ascending list of page numbers centered on the current page
   *         where possible
   */
  public List<Integer> getPageNumberList() {
    List<Integer> pageNumberList = new ArrayList<>();
    pageNumberList.add(currentPageNumber);
    int limitSize = pageRangeSize * 2 + 1;
    // Pages before the current one, up to pageRangeSize of them.
    // (Fixed: the loop updates were lost in a bad merge — "i--" restored.)
    if (currentPageNumber > 1) {
      for (int i = currentPageNumber - 1; pageNumberList.size() <= pageRangeSize && i > 0; i--) {
        pageNumberList.add(0, i);
      }
    }
    // Pages after the current one, until the window is full or pages run out.
    for (int i = currentPageNumber + 1; pageNumberList.size() < limitSize
        && i <= getAllPageCount(); i++) {
      pageNumberList.add(i);
    }
    // If the window is still not full (current page near the end), extend
    // further backwards.
    for (int i = pageNumberList.get(0) - 1; pageNumberList.size() < limitSize && i > 0; i--) {
      pageNumberList.add(0, i);
    }
    return pageNumberList;
  }
}
package fitnesse.wiki.fs;

import static fitnesse.wiki.VersionInfo.makeVersionInfo;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;

import fitnesse.wiki.PageData;
import fitnesse.wiki.VersionInfo;
import fitnesse.wiki.WikiPageProperties;

/**
 * A {@link VersionsController} that keeps exactly one version of each page on
 * disk: the page text in {@code content.txt} and its attributes in
 * {@code properties.xml}. No revision history is retained, so every "label"
 * resolves to the same, current data.
 */
public class SimpleFileVersionsController implements VersionsController {

  public static final String contentFilename = "content.txt";
  public static final String propertiesFilename = "properties.xml";

  private final FileSystem fileSystem;

  public SimpleFileVersionsController(FileSystem fileSystem) {
    this.fileSystem = fileSystem;
  }

  @Override
  public void setHistoryDepth(int historyDepth) {
    // Just one file, no history
  }

  @Override
  public PageData getRevisionData(FileSystemPage page, String label) {
    // The label is irrelevant here: only one revision ever exists.
    final PageData data = new PageData(page);
    loadContent(page, data);
    loadAttributes(page, data);
    return data;
  }

  @Override
  public Collection<VersionInfo> history(FileSystemPage page) {
    final VersionInfo onlyVersion = makeVersion(page, getRevisionData(page, ""));
    return Arrays.asList(onlyVersion);
  }

  @Override
  public VersionInfo makeVersion(FileSystemPage page, PageData data) {
    createDirectoryIfNewPage(page);
    saveContent(page, data.getContent());
    saveAttributes(page, data.getProperties());
    return makeVersionInfo(data);
  }

  @Override
  public VersionInfo getCurrentVersion(FileSystemPage page) {
    return makeVersionInfo(getRevisionData(page, null));
  }

  @Override
  public void delete(FileSystemPage page) {
    fileSystem.delete(page.getFileSystemPath());
  }

  /** Creates the page's directory on first save. */
  private void createDirectoryIfNewPage(final FileSystemPage page) {
    final String pagePath = page.getFileSystemPath();
    if (fileSystem.exists(pagePath)) {
      return;
    }
    try {
      fileSystem.makeDirectory(pagePath);
    } catch (IOException e) {
      throw new RuntimeException("Unable to create directory for new page", e);
    }
  }

  /**
   * Writes the page text to content.txt, normalizing line endings to the
   * platform convention. A null content is silently ignored.
   */
  protected synchronized void saveContent(final FileSystemPage page, String content) {
    if (content == null) {
      return;
    }
    final String eol = System.getProperty("line.separator");
    // A trailing table row would otherwise lose its final line break.
    if (content.endsWith("|")) {
      content += eol;
    }
    // Normalize Windows endings to Unix first, then convert everything to the
    // platform ending; the two-step dance works around odd behavior on Windows.
    content = content.replaceAll("\r\n", "\n");
    content = content.replaceAll("\n", eol);
    final String contentPath = page.getFileSystemPath() + "/" + contentFilename;
    try {
      fileSystem.makeFile(contentPath, content);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Writes the page attributes to properties.xml, first dropping properties
   * that change on every save.
   */
  protected synchronized void saveAttributes(final FileSystemPage page,
      final WikiPageProperties attributes) {
    String propertiesFilePath = "<unknown>";
    try {
      propertiesFilePath = page.getFileSystemPath() + "/" + propertiesFilename;
      final WikiPageProperties propertiesToSave = new WikiPageProperties(attributes);
      removeAlwaysChangingProperties(propertiesToSave);
      fileSystem.makeFile(propertiesFilePath, propertiesToSave.toXml());
    } catch (final Exception e) {
      throw new RuntimeException("Failed to save properties file: \""
          + propertiesFilePath + "\" (exception: " + e + ").", e);
    }
  }

  private void removeAlwaysChangingProperties(WikiPageProperties properties) {
    properties.remove(PageData.PropertyLAST_MODIFIED);
  }

  /** Reads content.txt into the page data; missing file means empty content. */
  private void loadContent(final FileSystemPage page, final PageData data) {
    final String contentPath = page.getFileSystemPath() + "/" + contentFilename;
    try {
      String content = "";
      if (fileSystem.exists(contentPath)) {
        content = fileSystem.getContent(contentPath);
      }
      data.setContent(content);
    } catch (IOException e) {
      throw new RuntimeException("Error while loading content", e);
    }
  }

  /**
   * Reads properties.xml into the page data. A read failure is reported to
   * stderr but deliberately does not abort the load.
   */
  private void loadAttributes(final FileSystemPage page, final PageData data) {
    final String propertiesPath = page.getFileSystemPath() + "/" + propertiesFilename;
    if (!fileSystem.exists(propertiesPath)) {
      return;
    }
    try {
      final long lastModifiedTime = getLastModifiedTime(page);
      final String propertiesXml = fileSystem.getContent(propertiesPath);
      data.setProperties(parsePropertiesXml(propertiesXml, lastModifiedTime));
    } catch (final Exception e) {
      System.err.println("Could not read properties file: " + propertiesPath);
      e.printStackTrace();
    }
  }

  /** The content file's timestamp stands in for the page's. */
  private long getLastModifiedTime(final FileSystemPage page) {
    return fileSystem.lastModified(page.getFileSystemPath() + "/" + contentFilename);
  }

  /** Parses a properties XML document and stamps it with the given time. */
  public static WikiPageProperties parsePropertiesXml(String propertiesXml, long lastModifiedTime) {
    final WikiPageProperties properties = new WikiPageProperties();
    properties.loadFromXml(propertiesXml);
    properties.setLastModificationTime(new Date(lastModifiedTime));
    return properties;
  }
}
package flatland.protobuf;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import ordered_map.core.OrderedMap;
import ordered_set.core.OrderedSet;
import clojure.lang.APersistentMap;
import clojure.lang.ASeq;
import clojure.lang.IFn;
import clojure.lang.IMapEntry;
import clojure.lang.IObj;
import clojure.lang.IPersistentCollection;
import clojure.lang.IPersistentMap;
import clojure.lang.IPersistentVector;
import clojure.lang.ISeq;
import clojure.lang.ITransientMap;
import clojure.lang.ITransientSet;
import clojure.lang.Keyword;
import clojure.lang.MapEntry;
import clojure.lang.Numbers;
import clojure.lang.Obj;
import clojure.lang.PersistentArrayMap;
import clojure.lang.PersistentVector;
import clojure.lang.RT;
import clojure.lang.SeqIterator;
import clojure.lang.Sequential;
import clojure.lang.Symbol;
import clojure.lang.Var;

import com.google.protobuf.CodedInputStream;
import com.google.protobuf.CodedOutputStream;
import com.google.protobuf.DescriptorProtos;
import com.google.protobuf.DescriptorProtos.FieldOptions;
import com.google.protobuf.Descriptors;
import com.google.protobuf.DynamicMessage;
import com.google.protobuf.GeneratedMessage;
import com.google.protobuf.InvalidProtocolBufferException;

/**
 * An immutable Clojure map ({@link APersistentMap}) backed by a protobuf
 * {@link DynamicMessage}. Map keys are translated to protobuf field names via a
 * pluggable {@link Def.NamingStrategy}; keys the message type cannot store are
 * kept in an extension map ({@code ext}). All "mutating" operations return a new
 * instance built from a fresh message builder.
 */
public class PersistentProtocolBufferMap extends APersistentMap implements IObj {

  /**
   * Metadata wrapper around a protobuf message descriptor: naming strategy,
   * message size limit, and caches for key-to-field lookups.
   */
  public static class Def {

    /** Maps between Clojure map keys and protobuf field names. */
    public static interface NamingStrategy {
      /**
       * Given a Clojure map key, return the string to be used as the protobuf
       * message field name.
       */
      String protoName(Object clojureName);

      /**
       * Given a protobuf message field name, return a Clojure object suitable
       * for use as a map key.
       */
      Object clojureName(String protoName);
    }

    // we want this to work for anything Named, so use clojure.core/name
    public static final Var NAME_VAR = Var.intern(RT.CLOJURE_NS, Symbol.intern("name"));

    /** Returns clojure.core/name of the argument, or null if it has no name. */
    public static final String nameStr(Object named) {
      try {
        return (String)((IFn)NAME_VAR.deref()).invoke(named);
      } catch (Exception e) {
        return null;
      }
    }

    /** Uses protobuf field names verbatim; keywords are lower-cased. */
    public static final NamingStrategy protobufNames = new NamingStrategy() {
      @Override
      public String protoName(Object name) {
        return nameStr(name);
      }

      @Override
      public Object clojureName(String name) {
        return Keyword.intern(name.toLowerCase());
      }

      @Override
      public String toString() {
        return "[protobuf names]";
      }
    };

    /** Translates Clojure dashes to protobuf underscores and back. */
    public static final NamingStrategy convertUnderscores = new NamingStrategy() {
      @Override
      public String protoName(Object name) {
        return nameStr(name).replaceAll("-", "_");
      }

      @Override
      public Object clojureName(String name) {
        return Keyword.intern(name.replaceAll("_", "-").toLowerCase());
      }

      @Override
      public String toString() {
        return "[convert underscores]";
      }
    };

    public final Descriptors.Descriptor type;
    public final NamingStrategy namingStrategy;
    public final int sizeLimit;

    public static final Object NULL = new Object();

    // keys should be FieldDescriptors, except that NULL is used as a replacement for real null
    ConcurrentHashMap<Object, Object> key_to_field;

    /** Composite cache key: descriptor + naming strategy + size limit. */
    private static final class DefOptions {
      public final Descriptors.Descriptor type;
      public final NamingStrategy strat;
      public final int sizeLimit;

      public DefOptions(Descriptors.Descriptor type, NamingStrategy strat, int sizeLimit) {
        this.type = type;
        this.strat = strat;
        this.sizeLimit = sizeLimit;
      }

      @Override
      public boolean equals(Object other) {
        // Guard against null and foreign types before touching other's class.
        if (other == null || this.getClass() != other.getClass()) {
          return false;
        }
        DefOptions od = (DefOptions)other;
        return type.equals(od.type) && strat.equals(od.strat) && sizeLimit == od.sizeLimit;
      }

      @Override
      public int hashCode() {
        return type.hashCode() + strat.hashCode() + sizeLimit;
      }
    }

    static ConcurrentHashMap<DefOptions, Def> defCache = new ConcurrentHashMap<DefOptions, Def>();

    /**
     * Returns the canonical Def for the given descriptor/strategy/limit.
     * <p>
     * Fixed: the cache lookup previously used the raw descriptor
     * ({@code defCache.get(type)}) although the cache is keyed by DefOptions,
     * so it always missed and a new Def was built on every call. The lookup
     * now uses {@code opts}, and the putIfAbsent race winner is adopted so
     * all callers share one canonical Def per options tuple.
     */
    public static Def create(Descriptors.Descriptor type, NamingStrategy strat, int sizeLimit) {
      DefOptions opts = new DefOptions(type, strat, sizeLimit);
      Def def = defCache.get(opts);
      if (def == null) {
        def = new Def(type, strat, sizeLimit);
        Def existing = defCache.putIfAbsent(opts, def);
        if (existing != null) {
          def = existing;
        }
      }
      return def;
    }

    protected Def(Descriptors.Descriptor type, NamingStrategy strat, int sizeLimit) {
      this.type = type;
      this.key_to_field = new ConcurrentHashMap<Object, Object>();
      this.namingStrategy = strat;
      this.sizeLimit = sizeLimit;
    }

    public DynamicMessage parseFrom(byte[] bytes) throws InvalidProtocolBufferException {
      return DynamicMessage.parseFrom(type, bytes);
    }

    public DynamicMessage parseFrom(CodedInputStream input) throws IOException {
      input.setSizeLimit(sizeLimit);
      return DynamicMessage.parseFrom(type, input);
    }

    /** Returns a builder holding the next delimited message, or null at EOF. */
    public DynamicMessage.Builder parseDelimitedFrom(InputStream input) throws IOException {
      DynamicMessage.Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder;
      } else {
        return null;
      }
    }

    public DynamicMessage.Builder newBuilder() {
      return DynamicMessage.newBuilder(type);
    }

    /**
     * Resolves a map key to a field descriptor, caching the result (including
     * misses, stored as NULL since ConcurrentHashMap rejects null values).
     */
    public Descriptors.FieldDescriptor fieldDescriptor(Object key) {
      if (key == null) {
        return null;
      }
      if (key instanceof Descriptors.FieldDescriptor) {
        return (Descriptors.FieldDescriptor)key;
      } else {
        Object field = key_to_field.get(key);
        if (field != null) {
          if (field == NULL) {
            return null;
          }
          return (Descriptors.FieldDescriptor)field;
        } else {
          field = type.findFieldByName(namingStrategy.protoName(key));
          key_to_field.putIfAbsent(key, field == null ? NULL : field);
        }
        return (Descriptors.FieldDescriptor)field;
      }
    }

    public String getName() {
      return type.getName();
    }

    public String getFullName() {
      return type.getFullName();
    }

    public Descriptors.Descriptor getMessageType() {
      return type;
    }

    static final ConcurrentHashMap<NamingStrategy, ConcurrentHashMap<String, Object>> caches =
        new ConcurrentHashMap<NamingStrategy, ConcurrentHashMap<String, Object>>();
    static final Object nullv = new Object();

    /**
     * Converts a protobuf name to its cached Clojure key. Empty names map to
     * null (cached via the nullv sentinel, since the map can't hold null).
     */
    public Object intern(String name) {
      ConcurrentHashMap<String, Object> nameCache = caches.get(namingStrategy);
      if (nameCache == null) {
        nameCache = new ConcurrentHashMap<String, Object>();
        ConcurrentHashMap<String, Object> existing = caches.putIfAbsent(namingStrategy, nameCache);
        if (existing != null) {
          nameCache = existing;
        }
      }
      Object clojureName = nameCache.get(name);
      if (clojureName == null) {
        // Fixed: was `name == ""`, a reference comparison that only works for
        // interned literals; descriptor strings are not guaranteed interned.
        if (name.isEmpty()) {
          clojureName = nullv;
        } else {
          clojureName = namingStrategy.clojureName(name);
          if (clojureName == null) {
            clojureName = nullv;
          }
        }
        Object existing = nameCache.putIfAbsent(name, clojureName);
        if (existing != null) {
          clojureName = existing;
        }
      }
      return clojureName == nullv ? null : clojureName;
    }

    public Object clojureEnumValue(Descriptors.EnumValueDescriptor enum_value) {
      return intern(enum_value.getName());
    }

    /** Key to index repeated-message fields by (map_by extension), or null. */
    protected Object mapFieldBy(Descriptors.FieldDescriptor field) {
      return intern(field.getOptions().getExtension(Extensions.mapBy));
    }

    /**
     * Merges two values of a map-style repeated field. The map_exists /
     * map_deleted extensions let a "reborn" entry replace (rather than merge
     * with) its predecessor.
     */
    protected PersistentProtocolBufferMap mapValue(Descriptors.FieldDescriptor field,
        PersistentProtocolBufferMap left, PersistentProtocolBufferMap right) {
      if (left == null) {
        return right;
      } else {
        Object map_exists = intern(field.getOptions().getExtension(Extensions.mapExists));
        if (map_exists != null) {
          if (left.valAt(map_exists) == Boolean.FALSE && right.valAt(map_exists) == Boolean.TRUE) {
            return right;
          } else {
            return left.append(right);
          }
        }
        Object map_deleted = intern(field.getOptions().getExtension(Extensions.mapDeleted));
        if (map_deleted != null) {
          if (left.valAt(map_deleted) == Boolean.TRUE && right.valAt(map_deleted) == Boolean.FALSE) {
            return right;
          } else {
            return left.append(right);
          }
        }
        return left.append(right);
      }
    }
  }

  public final Def def;
  private final DynamicMessage message;
  private final IPersistentMap _meta;
  // Overflow map for keys the protobuf message type cannot store.
  private final IPersistentMap ext;

  static public PersistentProtocolBufferMap create(Def def, byte[] bytes)
      throws InvalidProtocolBufferException {
    DynamicMessage message = def.parseFrom(bytes);
    return new PersistentProtocolBufferMap(null, def, message);
  }

  static public PersistentProtocolBufferMap parseFrom(Def def, CodedInputStream input)
      throws IOException {
    DynamicMessage message = def.parseFrom(input);
    return new PersistentProtocolBufferMap(null, def, message);
  }

  /** Reads one length-delimited message; returns null at end of stream. */
  static public PersistentProtocolBufferMap parseDelimitedFrom(Def def, InputStream input)
      throws IOException {
    DynamicMessage.Builder builder = def.parseDelimitedFrom(input);
    if (builder != null) {
      return new PersistentProtocolBufferMap(null, def, builder);
    } else {
      return null;
    }
  }

  /** Builds a protobuf map from any seqable of key/value entries. */
  static public PersistentProtocolBufferMap construct(Def def, Object keyvals) {
    PersistentProtocolBufferMap protobuf = new PersistentProtocolBufferMap(null, def);
    return protobuf.cons(keyvals);
  }

  protected PersistentProtocolBufferMap(IPersistentMap meta, Def def) {
    this._meta = meta;
    this.ext = null;
    this.def = def;
    this.message = null;
  }

  protected PersistentProtocolBufferMap(IPersistentMap meta, Def def, DynamicMessage message) {
    this._meta = meta;
    this.ext = null;
    this.def = def;
    this.message = message;
  }

  protected PersistentProtocolBufferMap(IPersistentMap meta, IPersistentMap ext, Def def,
      DynamicMessage message) {
    this._meta = meta;
    this.ext = ext;
    this.def = def;
    this.message = message;
  }

  protected PersistentProtocolBufferMap(IPersistentMap meta, Def def,
      DynamicMessage.Builder builder) {
    this._meta = meta;
    this.ext = null;
    this.def = def;
    this.message = builder.build();
  }

  protected PersistentProtocolBufferMap(IPersistentMap meta, IPersistentMap ext, Def def,
      DynamicMessage.Builder builder) {
    this._meta = meta;
    this.ext = ext;
    this.def = def;
    this.message = builder.build();
  }

  public byte[] toByteArray() {
    return message().toByteArray();
  }

  public void writeTo(CodedOutputStream output) throws IOException {
    message().writeTo(output);
  }

  public void writeDelimitedTo(OutputStream output) throws IOException {
    message().writeDelimitedTo(output);
  }

  public Descriptors.Descriptor getMessageType() {
    return def.getMessageType();
  }

  protected DynamicMessage message() {
    if (message == null) {
      return def.newBuilder().build(); // This will only work if an empty message is valid.
    } else {
      return message;
    }
  }

  protected DynamicMessage.Builder builder() {
    if (message == null) {
      return def.newBuilder();
    } else {
      return message.toBuilder();
    }
  }

  protected Object fromProtoValue(Descriptors.FieldDescriptor field, Object value) {
    return fromProtoValue(field, value, true);
  }

  static Keyword k_key = Keyword.intern("key");
  static Keyword k_val = Keyword.intern("val");
  static Keyword k_item = Keyword.intern("item");
  static Keyword k_exists = Keyword.intern("exists");

  /**
   * Converts a protobuf field value to its Clojure representation. When
   * use_extensions is true, repeated fields honor the map_by / counter /
   * succession / map / set extensions, and nullable scalar/enum fields whose
   * value equals the configured null marker come back as null.
   */
  protected Object fromProtoValue(Descriptors.FieldDescriptor field, Object value,
      boolean use_extensions) {
    if (value instanceof List) {
      List<?> values = (List<?>)value;
      Iterator<?> iterator = values.iterator();
      if (use_extensions) {
        Object map_field_by = def.mapFieldBy(field);
        DescriptorProtos.FieldOptions options = field.getOptions();
        if (map_field_by != null) {
          // Index repeated messages into a map keyed by one of their fields.
          ITransientMap map = (ITransientMap)OrderedMap.EMPTY.asTransient();
          while (iterator.hasNext()) {
            PersistentProtocolBufferMap v =
                (PersistentProtocolBufferMap)fromProtoValue(field, iterator.next());
            Object k = v.valAt(map_field_by);
            PersistentProtocolBufferMap existing = (PersistentProtocolBufferMap)map.valAt(k);
            map = map.assoc(k, def.mapValue(field, existing, v));
          }
          return map.persistent();
        } else if (options.getExtension(Extensions.counter)) {
          // Sum all occurrences into a single number.
          Object count = iterator.next();
          while (iterator.hasNext()) {
            count = Numbers.add(count, iterator.next());
          }
          return count;
        } else if (options.getExtension(Extensions.succession)) {
          // Last occurrence wins.
          return fromProtoValue(field, values.get(values.size() - 1));
        } else if (options.getExtension(Extensions.map)) {
          // Repeated {key, val} messages become a map; colliding keys merge.
          Descriptors.Descriptor type = field.getMessageType();
          Descriptors.FieldDescriptor key_field = type.findFieldByName("key");
          Descriptors.FieldDescriptor val_field = type.findFieldByName("val");
          ITransientMap map = (ITransientMap)OrderedMap.EMPTY.asTransient();
          while (iterator.hasNext()) {
            DynamicMessage message = (DynamicMessage)iterator.next();
            Object k = fromProtoValue(key_field, message.getField(key_field));
            Object v = fromProtoValue(val_field, message.getField(val_field));
            Object existing = map.valAt(k);
            if (existing instanceof PersistentProtocolBufferMap) {
              map = map.assoc(k, def.mapValue(field, (PersistentProtocolBufferMap)existing,
                  (PersistentProtocolBufferMap)v));
            } else if (existing instanceof IPersistentCollection) {
              map = map.assoc(k, ((IPersistentCollection)existing).cons(v));
            } else {
              map = map.assoc(k, v);
            }
          }
          return map.persistent();
        } else if (options.getExtension(Extensions.set)) {
          // Repeated {item, exists} messages become a set; exists=false removes.
          Descriptors.Descriptor type = field.getMessageType();
          Descriptors.FieldDescriptor item_field = type.findFieldByName("item");
          Descriptors.FieldDescriptor exists_field = type.findFieldByName("exists");
          ITransientSet set = (ITransientSet)OrderedSet.EMPTY.asTransient();
          while (iterator.hasNext()) {
            DynamicMessage message = (DynamicMessage)iterator.next();
            Object item = fromProtoValue(item_field, message.getField(item_field));
            Boolean exists = (Boolean)message.getField(exists_field);
            if (exists) {
              set = (ITransientSet)set.conj(item);
            } else {
              try {
                set = set.disjoin(item);
              } catch (Exception e) {
                // disjoin on a transient set declares a checked exception;
                // removal failure is logged and the element simply kept out.
                e.printStackTrace();
              }
            }
          }
          return set.persistent();
        }
      }
      List<Object> list = new ArrayList<Object>(values.size());
      while (iterator.hasNext()) {
        list.add(fromProtoValue(field, iterator.next(), use_extensions));
      }
      return PersistentVector.create(list);
    } else {
      switch (field.getJavaType()) {
        case ENUM:
          Descriptors.EnumValueDescriptor e = (Descriptors.EnumValueDescriptor)value;
          if (use_extensions && field.getOptions().getExtension(Extensions.nullable)
              && field.getOptions().getExtension(nullExtension(field)).equals(e.getNumber())) {
            return null;
          } else {
            return def.clojureEnumValue(e);
          }
        case MESSAGE:
          Def fieldDef = PersistentProtocolBufferMap.Def.create(field.getMessageType(),
              this.def.namingStrategy, this.def.sizeLimit);
          DynamicMessage message = (DynamicMessage)value;
          // Total hack because getField() doesn't return an empty array for repeated messages.
          if (field.isRepeated() && !message.isInitialized()) {
            return fromProtoValue(field, new ArrayList<Object>(), use_extensions);
          }
          return new PersistentProtocolBufferMap(null, fieldDef, message);
        default:
          if (use_extensions && field.getOptions().getExtension(Extensions.nullable)
              && field.getOptions().getExtension(nullExtension(field)).equals(value)) {
            return null;
          } else {
            return value;
          }
      }
    }
  }

  /**
   * Converts a Clojure value to the representation the protobuf field needs.
   * A null value on a nullable field is replaced by the field's configured
   * null marker. Invalid enum names/numbers are reported to *err* and yield
   * null (which the builder will then reject).
   */
  protected Object toProtoValue(Descriptors.FieldDescriptor field, Object value) {
    if (value == null && field.getOptions().getExtension(Extensions.nullable)) {
      value = field.getOptions().getExtension(nullExtension(field));
      if (field.getJavaType() == Descriptors.FieldDescriptor.JavaType.ENUM) {
        Descriptors.EnumDescriptor enum_type = field.getEnumType();
        Descriptors.EnumValueDescriptor enum_value = enum_type.findValueByNumber((Integer)value);
        if (enum_value == null) {
          PrintWriter err = (PrintWriter)RT.ERR.deref();
          err.format("invalid enum number %s for enum type %s\n", value, enum_type.getFullName());
        }
        return enum_value;
      }
    }
    switch (field.getJavaType()) {
      case LONG:
        return ((Number)value).longValue();
      case INT:
        return ((Number)value).intValue();
      case FLOAT:
        return ((Number)value).floatValue();
      case DOUBLE:
        return ((Number)value).doubleValue();
      case ENUM:
        String name = def.namingStrategy.protoName(value);
        Descriptors.EnumDescriptor enum_type = field.getEnumType();
        Descriptors.EnumValueDescriptor enum_value = enum_type.findValueByName(name);
        if (enum_value == null) {
          PrintWriter err = (PrintWriter)RT.ERR.deref();
          err.format("invalid enum value %s for enum type %s\n", name, enum_type.getFullName());
        }
        return enum_value;
      case MESSAGE:
        PersistentProtocolBufferMap protobuf;
        if (value instanceof PersistentProtocolBufferMap) {
          protobuf = (PersistentProtocolBufferMap)value;
        } else {
          Def fieldDef = PersistentProtocolBufferMap.Def.create(field.getMessageType(),
              this.def.namingStrategy, this.def.sizeLimit);
          protobuf = PersistentProtocolBufferMap.construct(fieldDef, value);
        }
        return protobuf.message();
      default:
        return value;
    }
  }

  /** Picks the null-marker extension matching the field's Java type. */
  static protected GeneratedMessage.GeneratedExtension<FieldOptions, ?> nullExtension(
      Descriptors.FieldDescriptor field) {
    switch (field.getJavaType()) {
      case LONG:
        return Extensions.nullLong;
      case INT:
        return Extensions.nullInt;
      case FLOAT:
        return Extensions.nullFloat;
      case DOUBLE:
        return Extensions.nullDouble;
      case STRING:
        return Extensions.nullString;
      case ENUM:
        return Extensions.nullEnum;
      default:
        return null;
    }
  }

  /** Wraps builder.addRepeatedField with a descriptive failure message. */
  protected void addRepeatedField(DynamicMessage.Builder builder,
      Descriptors.FieldDescriptor field, Object value) {
    try {
      builder.addRepeatedField(field, value);
    } catch (Exception e) {
      String msg = String.format("error adding %s to %s field %s", value,
          field.getJavaType().toString().toLowerCase(), field.getFullName());
      throw new IllegalArgumentException(msg, e);
    }
  }

  /** Wraps builder.setField with a descriptive failure message. */
  protected void setField(DynamicMessage.Builder builder, Descriptors.FieldDescriptor field,
      Object value) {
    try {
      builder.setField(field, value);
    } catch (IllegalArgumentException e) {
      String msg = String.format("error setting %s field %s to %s",
          field.getJavaType().toString().toLowerCase(), field.getFullName(), value);
      throw new IllegalArgumentException(msg, e);
    }
  }

  // returns true if the protobuf can store this key
  protected boolean addField(DynamicMessage.Builder builder, Object key, Object value) {
    if (key == null) {
      return false;
    }
    Descriptors.FieldDescriptor field = def.fieldDescriptor(key);
    if (field == null) {
      return false;
    }
    if (value == null && !(field.getOptions().getExtension(Extensions.nullable))) {
      // Non-nullable field: a null value is accepted but simply dropped.
      return true;
    }
    boolean set = field.getOptions().getExtension(Extensions.set);
    if (field.isRepeated()) {
      builder.clearField(field);
      if (value instanceof Sequential && !set) {
        // Plain repeated field from a sequence.
        for (ISeq s = RT.seq(value); s != null; s = s.next()) {
          Object v = toProtoValue(field, s.first());
          addRepeatedField(builder, field, v);
        }
      } else {
        Object map_field_by = def.mapFieldBy(field);
        if (map_field_by != null) {
          // map_by: each map entry becomes a message carrying its key.
          String field_name = def.namingStrategy.protoName(map_field_by);
          for (ISeq s = RT.seq(value); s != null; s = s.next()) {
            Map.Entry<?, ?> e = (Map.Entry<?, ?>)s.first();
            IPersistentMap map = (IPersistentMap)e.getValue();
            Object k = e.getKey();
            Object v = toProtoValue(field, map.assoc(map_field_by, k).assoc(field_name, k));
            addRepeatedField(builder, field, v);
          }
        } else if (field.getOptions().getExtension(Extensions.map)) {
          // map: entries become {key, val} messages.
          for (ISeq s = RT.seq(value); s != null; s = s.next()) {
            Map.Entry<?, ?> e = (Map.Entry<?, ?>)s.first();
            Object[] map = {k_key, e.getKey(), k_val, e.getValue()};
            addRepeatedField(builder, field, toProtoValue(field, new PersistentArrayMap(map)));
          }
        } else if (set) {
          // set: elements become {item, exists} messages; a map input supplies
          // explicit exists flags, any other seq implies exists=true.
          Object k, v;
          boolean isMap = (value instanceof IPersistentMap);
          for (ISeq s = RT.seq(value); s != null; s = s.next()) {
            if (isMap) {
              Map.Entry<?, ?> e = (Map.Entry<?, ?>)s.first();
              k = e.getKey();
              v = e.getValue();
            } else {
              k = s.first();
              v = true;
            }
            Object[] map = {k_item, k, k_exists, v};
            addRepeatedField(builder, field, toProtoValue(field, new PersistentArrayMap(map)));
          }
        } else {
          addRepeatedField(builder, field, toProtoValue(field, value));
        }
      }
    } else {
      Object v = toProtoValue(field, value);
      if (v instanceof DynamicMessage) {
        // Merge into the existing sub-message instead of replacing it.
        v = ((DynamicMessage)builder.getField(field)).toBuilder().mergeFrom((DynamicMessage)v)
            .build();
      }
      setField(builder, field, v);
    }
    return true;
  }

  @Override
  public PersistentProtocolBufferMap withMeta(IPersistentMap meta) {
    if (meta == meta()) {
      return this;
    }
    return new PersistentProtocolBufferMap(meta, ext, def, message);
  }

  @Override
  public IPersistentMap meta() {
    return _meta;
  }

  @Override
  public boolean containsKey(Object key) {
    return protoContainsKey(key) || RT.booleanCast(RT.contains(ext, key));
  }

  private boolean protoContainsKey(Object key) {
    Descriptors.FieldDescriptor field = def.fieldDescriptor(key);
    if (field == null) {
      return false;
    } else if (field.isRepeated()) {
      return message().getRepeatedFieldCount(field) > 0;
    } else {
      return message().hasField(field) || field.hasDefaultValue();
    }
  }

  private static final Object sentinel = new Object();

  @Override
  public IMapEntry entryAt(Object key) {
    Object value = valAt(key, sentinel);
    return (value == sentinel) ? null : new MapEntry(key, value);
  }

  @Override
  public Object valAt(Object key) {
    return getValAt(key, true);
  }

  @Override
  public Object valAt(Object key, Object notFound) {
    return getValAt(key, notFound, true);
  }

  public Object getValAt(Object key, boolean use_extensions) {
    Object val = getValAt(key, sentinel, use_extensions);
    return (val == sentinel) ? null : val;
  }

  public Object getValAt(Object key, Object notFound, boolean use_extensions) {
    Descriptors.FieldDescriptor field = def.fieldDescriptor(key);
    if (protoContainsKey(key)) {
      return fromProtoValue(field, message().getField(field), use_extensions);
    } else {
      return RT.get(ext, key, notFound);
    }
  }

  protected boolean getBoolean(Object key) {
    Object val = valAt(key);
    return val != null && val != Boolean.FALSE;
  }

  @Override
  public PersistentProtocolBufferMap assoc(Object key, Object value) {
    DynamicMessage.Builder builder = builder();
    if (addField(builder, key, value)) {
      return new PersistentProtocolBufferMap(meta(), ext, def, builder);
    } else {
      // The message type can't store this key; keep it in the extension map.
      return new PersistentProtocolBufferMap(meta(), (IPersistentMap)RT.assoc(ext, key, value),
          def, builder);
    }
  }

  @Override
  public PersistentProtocolBufferMap assocEx(Object key, Object value) {
    if (containsKey(key)) {
      throw new RuntimeException("Key already present");
    }
    return assoc(key, value);
  }

  @Override
  public PersistentProtocolBufferMap cons(Object o) {
    if (o instanceof Map.Entry) {
      Map.Entry<?, ?> e = (Map.Entry<?, ?>)o;
      return assoc(e.getKey(), e.getValue());
    } else if (o instanceof IPersistentVector) {
      IPersistentVector v = (IPersistentVector)o;
      if (v.count() != 2) {
        throw new IllegalArgumentException("Vector arg to map conj must be a pair");
      }
      return assoc(v.nth(0), v.nth(1));
    } else {
      // Seq of entries: batch them into a single builder pass.
      DynamicMessage.Builder builder = builder();
      IPersistentMap ext = this.ext;
      for (ISeq s = RT.seq(o); s != null; s = s.next()) {
        Map.Entry<?, ?> e = (Map.Entry<?, ?>)s.first();
        Object k = e.getKey(), v = e.getValue();
        if (!addField(builder, k, v)) {
          ext = (IPersistentMap)RT.assoc(ext, k, v);
        }
      }
      return new PersistentProtocolBufferMap(meta(), ext, def, builder);
    }
  }

  /** Merges another map into this one at the protobuf message level. */
  public PersistentProtocolBufferMap append(IPersistentMap map) {
    PersistentProtocolBufferMap proto;
    if (map instanceof PersistentProtocolBufferMap) {
      proto = (PersistentProtocolBufferMap)map;
    } else {
      proto = construct(def, map);
    }
    return new PersistentProtocolBufferMap(meta(), ext, def,
        builder().mergeFrom(proto.message()));
  }

  @Override
  public IPersistentMap without(Object key) {
    Descriptors.FieldDescriptor field = def.fieldDescriptor(key);
    if (field == null) {
      IPersistentMap newExt = (IPersistentMap)RT.dissoc(ext, key);
      if (newExt == ext) {
        return this;
      }
      return new PersistentProtocolBufferMap(meta(), newExt, def, builder());
    }
    if (field.isRequired()) {
      throw new RuntimeException("Can't remove required field");
    }
    return new PersistentProtocolBufferMap(meta(), ext, def, builder().clearField(field));
  }

  @Override
  public Iterator<?> iterator() {
    return new SeqIterator(seq());
  }

  @Override
  public int count() {
    int count = RT.count(ext);
    for (Descriptors.FieldDescriptor field : def.type.getFields()) {
      if (protoContainsKey(field)) {
        count++;
      }
    }
    return count;
  }

  @Override
  public ISeq seq() {
    return Seq.create(null, this, RT.seq(def.type.getFields()));
  }

  @Override
  public IPersistentCollection empty() {
    return new PersistentProtocolBufferMap(meta(), null, def, builder().clear());
  }

  /**
   * Lazy seq over the message's populated fields, followed by the entries of
   * the extension map once the field list is exhausted.
   */
  private static class Seq extends ASeq {
    private final PersistentProtocolBufferMap proto;
    private final MapEntry first;
    private final ISeq fields;

    public static ISeq create(IPersistentMap meta, PersistentProtocolBufferMap proto,
        ISeq fields) {
      // Skip fields that have no value in this message.
      for (ISeq s = fields; s != null; s = s.next()) {
        Descriptors.FieldDescriptor field = (Descriptors.FieldDescriptor)s.first();
        Object k = proto.def.intern(field.getName());
        Object v = proto.valAt(k, sentinel);
        if (v != sentinel) {
          return new Seq(meta, proto, new MapEntry(k, v), s);
        }
      }
      return RT.seq(proto.ext);
    }

    protected Seq(IPersistentMap meta, PersistentProtocolBufferMap proto, MapEntry first,
        ISeq fields) {
      super(meta);
      this.proto = proto;
      this.first = first;
      this.fields = fields;
    }

    @Override
    public Obj withMeta(IPersistentMap meta) {
      if (meta != meta()) {
        return new Seq(meta, proto, first, fields);
      }
      return this;
    }

    @Override
    public Object first() {
      return first;
    }

    @Override
    public ISeq next() {
      return create(meta(), proto, fields.next());
    }
  }
}
package froop.db.jpa.entity;

import lombok.Data;

import javax.persistence.*;
import java.io.Serializable;

/**
 * Minimal JPA entity with a manually assigned primary key ({@code id})
 * and a single {@code name} attribute.
 *
 * <p>Lombok's {@code @Data} generates getters/setters plus
 * {@code equals}/{@code hashCode}/{@code toString} over all fields.
 * NOTE(review): {@code @Data} on a JPA entity is often discouraged because the
 * generated {@code equals}/{@code hashCode} use mutable state — confirm callers
 * do not rely on identity semantics before changing it.
 */
@Entity
@Data
public class Sample implements Serializable {
    // Required for Serializable; bump if the persistent shape changes.
    private static final long serialVersionUID = 1L;

    // Primary key; no @GeneratedValue, so the caller assigns ids explicitly.
    @Id
    private long id;
    private String name;

    // Protected no-arg constructor: required by the JPA provider for
    // instantiation via reflection; not meant for application code.
    protected Sample() {}
}
package org.voltcore.agreement; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.net.InetSocketAddress; import java.nio.ByteBuffer; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Random; import java.util.Set; import java.util.TreeSet; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import org.apache.jute_voltpatches.BinaryInputArchive; import org.apache.jute_voltpatches.BinaryOutputArchive; import org.apache.zookeeper_voltpatches.ZooDefs.OpCode; import org.apache.zookeeper_voltpatches.server.NIOServerCnxn; import org.apache.zookeeper_voltpatches.server.Request; import org.apache.zookeeper_voltpatches.server.ServerCnxn; import org.apache.zookeeper_voltpatches.server.ZooKeeperServer; import org.json_voltpatches.JSONObject; import org.voltcore.TransactionIdManager; import org.voltcore.logging.Level; import org.voltcore.logging.VoltLogger; import org.voltcore.messaging.AgreementTaskMessage; import org.voltcore.messaging.BinaryPayloadMessage; import org.voltcore.messaging.DisconnectFailedHostsCallback; import org.voltcore.messaging.FaultMessage; import org.voltcore.messaging.HeartbeatMessage; import org.voltcore.messaging.HeartbeatResponseMessage; import org.voltcore.messaging.LocalObjectMessage; import org.voltcore.messaging.Mailbox; import org.voltcore.messaging.RecoveryMessage; import org.voltcore.messaging.TransactionInfoBaseMessage; import org.voltcore.messaging.VoltMessage; import org.voltcore.utils.CoreUtils; import org.voltcore.utils.RateLimitedLogger; import com.google_voltpatches.common.collect.ImmutableSet; /* * A wrapper around a single node ZK server. The server is a modified version of ZK that speaks the ZK * wire protocol and data model, but has no durability. 
Agreement is provided
 * by the AgreementSite wrapper which contains a restricted priority queue like an execution site,
 * but also has a transaction id manager and a unique initiator id. The initiator ID and site id are the same
 * as the id of the regular txn initiator on this node. The mailbox used has a different ID so messages
 * for agreement are routed here.
 *
 * Recovery is implemented by shipping a complete snapshot at a txnid to the recovering node, then every node
 * ships all the agreement txns they know about to the recovering node.
 */
public class AgreementSite implements org.apache.zookeeper_voltpatches.server.ZooKeeperServer.Callout {

    // Type tags stored as the first byte of BinaryPayloadMessage metadata,
    // used to distinguish snapshot shipments from join requests.
    private static final byte BINARY_PAYLOAD_SNAPSHOT = 0;
    private static final byte BINARY_PAYLOAD_JOIN_REQUEST = 1;

    // Recovery state machine; a non-recovering site stays at RECOVERED.
    private static enum RecoveryStage {
        WAITING_FOR_SAFETY,
        SENT_PROPOSAL,
        RECEIVED_SNAPSHOT,
        RECOVERED
    }

    private RecoveryStage m_recoveryStage = RecoveryStage.RECOVERED;
    // Released once recovery finishes (or immediately if no recovery needed).
    private final CountDownLatch m_recoveryComplete = new CountDownLatch(1);
    private final ZooKeeperServer m_server;
    private final NIOServerCnxn.Factory m_cnxnFactory;
    private final Mailbox m_mailbox;
    private final TransactionIdManager m_idManager;
    private final RestrictedPriorityQueue m_txnQueue;
    private final long m_hsId;

    /*
     * Not failed sites
     */
    private final TreeSet<Long> m_hsIds = new TreeSet<Long>();
    private final HashMap<Long, OrderableTransaction> m_transactionsById =
        new HashMap<Long, OrderableTransaction>();
    final AgreementTxnIdSafetyState m_safetyState;
    private volatile boolean m_shouldContinue = true;
    private volatile boolean m_recovering = false;
    private static final VoltLogger m_recoveryLog = new VoltLogger("REJOIN");
    private static final VoltLogger m_agreementLog = new VoltLogger("AGREEMENT");
    // Txns below this id predate the recovery snapshot and must be discarded.
    private long m_minTxnIdAfterRecovery = Long.MIN_VALUE;
    private final CountDownLatch m_shutdownComplete = new CountDownLatch(1);
    // Compressed ZK snapshot received during recovery; consumed by processZKSnapshot().
    private byte m_recoverySnapshot[] = null;
    // Txn id boundary for recovery hand-off; null when no recovery is in flight.
    private Long m_recoverBeforeTxn = null;
    // HSId of the site we owe a snapshot to; null when none.
    private Long m_siteRequestingRecovery = null;
    private final DisconnectFailedHostsCallback m_failedHostsCallback;
    private final MeshArbiter m_meshArbiter;

    /**
     * @param myAgreementHSId this site's agreement HSId
     * @param agreementHSIds all known agreement sites (including this one)
     * @param initiatorId id used to seed the transaction id manager
     * @param mailbox mailbox dedicated to agreement traffic
     * @param address address for the embedded ZK connection factory
     * @param backwardsTimeForgiveness clock-skew tolerance for txn id generation
     * @param failedHostsCallback invoked when hosts are declared failed
     * @throws IOException if the ZK connection factory cannot be created
     */
    public AgreementSite(
            long myAgreementHSId,
            Set<Long> agreementHSIds,
            int initiatorId,
            Mailbox mailbox,
            InetSocketAddress address,
            long backwardsTimeForgiveness,
            DisconnectFailedHostsCallback failedHostsCallback
            ) throws IOException {
        m_mailbox = mailbox;
        m_hsId = myAgreementHSId;
        m_hsIds.addAll(agreementHSIds);
        m_failedHostsCallback = failedHostsCallback;

        m_idManager = new TransactionIdManager( initiatorId, 0, backwardsTimeForgiveness );
        // note, the agreement site always uses the safety dance, even
        // if it could skip it if there was one node
        m_txnQueue =
            new RestrictedPriorityQueue(
                    myAgreementHSId, mailbox, true);
        m_safetyState = new AgreementTxnIdSafetyState(myAgreementHSId);
        for (Long hsId : m_hsIds) {
            m_txnQueue.ensureInitiatorIsKnown(hsId);
            m_safetyState.addState(hsId);
        }

        m_meshArbiter = new MeshArbiter(m_hsId, mailbox, m_meshAide);
        m_cnxnFactory =
            new NIOServerCnxn.Factory( address, 10);
        m_server = new ZooKeeperServer(this);
        // More than one site means this node is joining an existing mesh
        // and must recover the current ZK state first.
        if (agreementHSIds.size() > 1) {
            m_recovering = true;
        }
        if (m_recovering) {
            m_recoveryStage = RecoveryStage.WAITING_FOR_SAFETY;
        } else {
            m_recoveryComplete.countDown();
        }
    }

    private Set<Long> m_threadIds;

    /** Starts the embedded ZK server threads. */
    public void start() throws InterruptedException, IOException {
        m_threadIds = ImmutableSet.<Long>copyOf(m_cnxnFactory.startup(m_server));
    }

    public Set<Long> getThreadIds() {
        return m_threadIds;
    }

    /** Signals the run loop to exit and blocks until internal shutdown completes. */
    public void shutdown() throws InterruptedException {
        m_shouldContinue = false;
        m_shutdownComplete.await();
    }

    private void shutdownInternal() {
        // note that shutdown will join the thread
        m_cnxnFactory.shutdown();
    }

    /**
     * Run loop used while this site is recovering: proposes a recovery point once
     * the queue is safe, drains pre-snapshot txns, and loads the snapshot on arrival.
     */
    public void recoveryRunLoop() throws Exception {
        long lastHeartbeatTime = System.currentTimeMillis();
        while (m_recovering && m_shouldContinue) {
            if (m_recoveryStage == RecoveryStage.WAITING_FOR_SAFETY) {
                Long safeTxnId = m_txnQueue.safeToRecover();
                if (safeTxnId != null) {
                    m_recoveryStage = RecoveryStage.SENT_PROPOSAL;
                    m_recoverBeforeTxn = safeTxnId;
                    // Pick any other live site as the snapshot source.
                    long sourceHSId = 0;
                    for (Long hsId : m_hsIds) {
                        if (hsId != m_hsId) {
                            sourceHSId = hsId;
                            break;
                        }
                    }
                    RecoveryMessage recoveryMessage = new RecoveryMessage( m_hsId, safeTxnId, -1);
                    m_mailbox.send( sourceHSId, recoveryMessage);
                }
            }

            VoltMessage message = m_mailbox.recvBlocking(5);
            if (message != null) {
                processMessage(message);
            }

            final long now = System.currentTimeMillis();
            if (now - lastHeartbeatTime > 5) {
                lastHeartbeatTime = now;
                sendHeartbeats();
            }
            if (m_recoverBeforeTxn == null) {
                continue;
            }

            // Discard (without executing) anything ordered before the recovery point;
            // the snapshot already reflects those txns.
            if (m_txnQueue.peek() != null && m_txnQueue.peek().txnId < m_recoverBeforeTxn.longValue()) {
                m_transactionsById.remove(m_txnQueue.poll().txnId);
            } else if (m_recoveryStage == RecoveryStage.RECEIVED_SNAPSHOT) {
                processZKSnapshot();
                return;
            }
        }
    }

    // Last txn id handed to the ZK server for a write; reused for reads pulled
    // out of global order (see run()).
    private long m_lastUsedTxnId = 0;

    /** Main site loop: drain mailbox, heartbeat, and execute ordered transactions. */
    @Override
    public void run() {
        try {
            if (m_recovering) {
                recoveryRunLoop();
            }
            long lastHeartbeatTime = System.currentTimeMillis();
            while (m_shouldContinue) {
                VoltMessage message = m_mailbox.recvBlocking(5);
                if (message != null) {
                    processMessage(message);
                }

                final long now = System.currentTimeMillis();
                if (now - lastHeartbeatTime > 5) {
                    lastHeartbeatTime = now;
                    sendHeartbeats();
                }
                if (m_recovering) {
                    continue;
                }

                OrderableTransaction ot = m_txnQueue.poll();
                if (ot != null) {
                    if (m_recoverBeforeTxn != null) {
                        assert(m_recoveryStage == RecoveryStage.RECOVERED);
                        assert(m_recovering == false);
                        assert(m_siteRequestingRecovery != null);
                        // We owe a recovering site a snapshot; ship it once we hit
                        // the agreed recovery point.
                        if (ot.txnId >= m_recoverBeforeTxn) {
                            shipZKDatabaseSnapshot(m_siteRequestingRecovery, ot.txnId);
                        }
                    }

                    if (ot.txnId < m_minTxnIdAfterRecovery) {
                        String errMsg = "Transaction queue released a transaction from before this " +
                                " node was recovered was complete";
                        org.voltdb.VoltDB.crashLocalVoltDB(errMsg, false, null);
                    }
                    m_transactionsById.remove(ot.txnId);

                    if (ot instanceof AgreementRejoinTransactionState) {
                        AgreementRejoinTransactionState txnState = (AgreementRejoinTransactionState)ot;
                        try {
                            processJoin(txnState.m_rejoiningSite);
                        } finally {
                            if (txnState.m_onCompletion != null) {
                                txnState.m_onCompletion.countDown();
                            }
                        }
                    } else if (ot instanceof AgreementTransactionState) {
                        AgreementTransactionState txnState = (AgreementTransactionState)ot;
                        //Owner is what associates the session with a specific initiator
                        //only used for createSession
                        txnState.m_request.setOwner(txnState.initiatorHSId);

                        /*
                         * We may pull reads out of the priority queue outside the global
                         * order. This means the txnid might be wrong so just sub
                         * the last used txnid from a write that is guaranteed to have been globally
                         * ordered properly
                         *
                         * It doesn't matter for the most part, but the ZK code we give the ID to expects to
                         * it to always increase and if we pull reads in early that will not always be true.
                         */
                        long txnIdToUse = txnState.txnId;
                        switch (txnState.m_request.type) {
                            case OpCode.exists:
                            case OpCode.getChildren:
                            case OpCode.getChildren2:
                            case OpCode.getData:
                                //Don't use the txnid generated for the read since
                                //it may not be globally ordered with writes
                                txnIdToUse = m_lastUsedTxnId;
                                break;
                            default:
                                //This is a write, stash away the txnid for use
                                //for future reads
                                m_lastUsedTxnId = txnState.txnId;
                                break;
                        }

                        m_server.prepRequest(txnState.m_request, txnIdToUse);
                    }
                } else if (m_recoverBeforeTxn != null) {
                    assert(m_recoveryStage == RecoveryStage.RECOVERED);
                    assert(m_recovering == false);
                    assert(m_siteRequestingRecovery != null);
                    // Queue is empty; ship the snapshot once the safe point passes
                    // the agreed recovery point.
                    Long foo = m_txnQueue.safeToRecover();
                    if (foo != null && foo.longValue() >= m_recoverBeforeTxn.longValue()) {
                        shipZKDatabaseSnapshot(m_siteRequestingRecovery, foo);
                    }
                }
            }
        } catch (Throwable e) {
            org.voltdb.VoltDB.crashLocalVoltDB("Error in agreement site", true, e);
        } finally {
            try {
                shutdownInternal();
            } catch (Exception e) {
                m_agreementLog.warn("Exception during agreement internal shutdown.", e);
            } finally {
                m_shutdownComplete.countDown();
            }
        }
    }

    /** Registers a newly joined agreement site with the queue and safety state. */
    private void processJoin(long joiningAgreementSite) {
        m_safetyState.addState(joiningAgreementSite);
        m_txnQueue.ensureInitiatorIsKnown(joiningAgreementSite);
        m_hsIds.add(joiningAgreementSite);
        m_recoveryLog.info("Joining site " + CoreUtils.hsIdToString(joiningAgreementSite) +
                " known active sites " + CoreUtils.hsIdCollectionToString(m_hsIds));
    }

    private void sendHeartbeats() {
        sendHeartbeats(m_hsIds);
    }

    /** Sends a heartbeat with a fresh txn id to every site in {@code hsIds}. */
    private void sendHeartbeats(Set<Long> hsIds) {
        long txnId = m_idManager.getNextUniqueTransactionId();
        for (long initiatorId : hsIds) {
            HeartbeatMessage heartbeat =
                new HeartbeatMessage( m_hsId, txnId, m_safetyState.getNewestGloballySafeTxnId());
            m_mailbox.send( initiatorId, heartbeat);
        }
    }

    private long m_lastHeartbeatTime = System.nanoTime();

    /**
     * Dispatches one mailbox message. Handles heartbeats, local requests,
     * agreement tasks, snapshot/join payloads, faults and recovery proposals.
     */
    private void processMessage(VoltMessage message) throws Exception {
        if (!m_hsIds.contains(message.m_sourceHSId)) {
            String messageFormat = "Dropping message %s because it is not from a known up site";
            RateLimitedLogger.tryLogForMessage(m_lastHeartbeatTime, 10000, TimeUnit.MILLISECONDS,
                    m_agreementLog, Level.INFO, messageFormat, message);
            return;
        }
        if (message instanceof TransactionInfoBaseMessage) {
            TransactionInfoBaseMessage info = (TransactionInfoBaseMessage)message;

            // Special case heartbeats which only update RPQ
            if (info instanceof HeartbeatMessage) {
                // use the heartbeat to unclog the priority queue if clogged
                long lastSeenTxnFromInitiator = m_txnQueue.noteTransactionRecievedAndReturnLastSeen(
                        info.getInitiatorHSId(), info.getTxnId(),
                        ((HeartbeatMessage) info).getLastSafeTxnId());

                // respond to the initiator with the last seen transaction
                HeartbeatResponseMessage response = new HeartbeatResponseMessage(
                        m_hsId, lastSeenTxnFromInitiator,
                        m_txnQueue.getQueueState() == RestrictedPriorityQueue.QueueState.BLOCKED_SAFETY);
                m_mailbox.send(info.getInitiatorHSId(), response);
                // we're done here (in the case of heartbeats)
                return;
            }
            assert(false);
        } else if (message instanceof HeartbeatResponseMessage) {
            HeartbeatResponseMessage hrm = (HeartbeatResponseMessage)message;
            m_safetyState.updateLastSeenTxnIdFromExecutorBySiteId(
                    hrm.getExecHSId(), hrm.getLastReceivedTxnId());
        } else if (message instanceof LocalObjectMessage) {
            LocalObjectMessage lom = (LocalObjectMessage)message;
            if (lom.payload instanceof Runnable) {
                ((Runnable)lom.payload).run();
            } else if (lom.payload instanceof Request) {
                Request r = (Request)lom.payload;
                long txnId = 0;
                boolean isRead = false;
                switch(r.type) {
                    case OpCode.createSession:
                        txnId = r.sessionId;
                        break;
                    //For reads see if we can skip global agreement and just do the read
                    case OpCode.exists:
                    case OpCode.getChildren:
                    case OpCode.getChildren2:
                    case OpCode.getData:
                        //If there are writes they can go in the queue (and some reads), don't short circuit
                        //in this case because ordering of reads and writes matters
                        if (m_txnQueue.isEmpty()) {
                            r.setOwner(m_hsId);
                            m_server.prepRequest(new Request(r), m_lastUsedTxnId);
                            return;
                        }
                        isRead = true;
                        //Fall through is intentional, going with the default of putting
                        //it in the global order
                    default:
                        txnId = m_idManager.getNextUniqueTransactionId();
                        break;
                }

                /*
                 * Don't send the whole request if this is a read blocked on a write
                 * We may send a heartbeat instead of propagating a useless read transaction
                 * at the end of this block
                 */
                if (!isRead) {
                    for (long initiatorHSId : m_hsIds) {
                        if (initiatorHSId == m_hsId) continue;
                        AgreementTaskMessage atm =
                            new AgreementTaskMessage(
                                    r, txnId, m_hsId, m_safetyState.getNewestGloballySafeTxnId());
                        m_mailbox.send( initiatorHSId, atm);
                    }
                }
                //Process the ATM eagerly locally to aid
                //in having a complete set of stuff to ship
                //to a recovering agreement site
                AgreementTaskMessage atm =
                    new AgreementTaskMessage(
                            new Request(r), txnId, m_hsId, m_safetyState.getNewestGloballySafeTxnId());
                atm.m_sourceHSId = m_hsId;
                processMessage(atm);

                /*
                 * Don't send a heartbeat out for ever single blocked read that occurs
                 * Try and limit to 2000 a second which is a lot and should be pretty
                 * close to the previous behavior of propagating all reads. My measurements
                 * don't show the old behavior is better than none at all, but I fear
                 * change.
                 */
                if (isRead) {
                    final long now = System.nanoTime();
                    if (TimeUnit.NANOSECONDS.toMicros(now - m_lastHeartbeatTime) > 500) {
                        m_lastHeartbeatTime = now;
                        sendHeartbeats();
                    }
                }
            }
        } else if (message instanceof AgreementTaskMessage) {
            AgreementTaskMessage atm = (AgreementTaskMessage)message;
            // Reject duplicates and anything ordered before the recovery snapshot.
            if (!m_transactionsById.containsKey(atm.m_txnId) && atm.m_txnId >= m_minTxnIdAfterRecovery) {
                m_txnQueue.noteTransactionRecievedAndReturnLastSeen(atm.m_initiatorHSId,
                        atm.m_txnId,
                        atm.m_lastSafeTxnId);
                AgreementTransactionState transactionState =
                    new AgreementTransactionState(atm.m_txnId, atm.m_initiatorHSId, atm.m_request);
                if (m_txnQueue.add(transactionState)) {
                    m_transactionsById.put(transactionState.txnId, transactionState);
                } else {
                    m_agreementLog.info(
                            "Dropping txn " + transactionState.txnId +
                            " data from failed initiatorSiteId: " + transactionState.initiatorHSId);
                }
            } else {
                m_recoveryLog.info("Agreement, discarding duplicate txn during recovery, txnid is " + atm.m_txnId +
                        " this should only occur during recovery. minTxnIdAfterRecovery " +
                        m_minTxnIdAfterRecovery + " and dup is " + m_transactionsById.containsKey(atm.m_txnId));
            }
        } else if (message instanceof BinaryPayloadMessage) {
            BinaryPayloadMessage bpm = (BinaryPayloadMessage)message;
            ByteBuffer metadata = ByteBuffer.wrap(bpm.m_metadata);
            final byte type = metadata.get();
            if (type == BINARY_PAYLOAD_SNAPSHOT) {
                assert(m_recovering);
                assert(m_recoveryStage == RecoveryStage.SENT_PROPOSAL);
                if (m_recoveryStage != RecoveryStage.SENT_PROPOSAL) {
                    org.voltdb.VoltDB.crashLocalVoltDB(
                            "Received a recovery snapshot in stage " + m_recoveryStage.toString(), true, null);
                }
                long selectedRecoverBeforeTxn = metadata.getLong();
                if (selectedRecoverBeforeTxn < m_recoverBeforeTxn) {
                    org.voltdb.VoltDB.crashLocalVoltDB(
                            "Selected recover before txn was earlier than the proposed recover before txn", true, null);
                }
                m_recoverBeforeTxn = selectedRecoverBeforeTxn;
                m_minTxnIdAfterRecovery = m_recoverBeforeTxn;//anything before this precedes the snapshot
                try {
                    m_recoverySnapshot = org.xerial.snappy.Snappy.uncompress(bpm.m_payload);
                } catch (IOException e) {
                    org.voltdb.VoltDB.crashLocalVoltDB("Unable to decompress ZK snapshot", true, e);
                }
                m_recoveryStage = RecoveryStage.RECEIVED_SNAPSHOT;

                /*
                 * Clean out all txns from before the snapshot
                 */
                Iterator<Map.Entry< Long, OrderableTransaction>> iter =
                    m_transactionsById.entrySet().iterator();
                while (iter.hasNext()) {
                    final Map.Entry< Long, OrderableTransaction> entry = iter.next();
                    if (entry.getKey() < m_minTxnIdAfterRecovery) {
                        m_txnQueue.faultTransaction(entry.getValue());
                        iter.remove();
                    }
                }
            } else if (type == BINARY_PAYLOAD_JOIN_REQUEST) {
                JSONObject jsObj = new JSONObject(new String(bpm.m_payload, "UTF-8"));
                final long initiatorHSId = jsObj.getLong("initiatorHSId");
                final long txnId = jsObj.getLong("txnId");
                final long lastSafeTxnId = jsObj.getLong("lastSafeTxnId");
                final long joiningHSId = jsObj.getLong("joiningHSId");
                if (m_recovering) {
                    org.voltdb.VoltDB.crashLocalVoltDB(
                            "Received a join request during recovery for " +
                            CoreUtils.hsIdToString(joiningHSId) + " from " +
                            CoreUtils.hsIdToString(initiatorHSId), true, null);
                }
                m_txnQueue.noteTransactionRecievedAndReturnLastSeen(initiatorHSId, txnId, lastSafeTxnId);
                AgreementRejoinTransactionState transactionState =
                    new AgreementRejoinTransactionState(txnId, initiatorHSId, joiningHSId, null);
                if (m_txnQueue.add(transactionState)) {
                    m_transactionsById.put(transactionState.txnId, transactionState);
                } else {
                    m_agreementLog.info(
                            "Dropping txn " + transactionState.txnId +
                            " data from failed initiatorSiteId: " + transactionState.initiatorHSId);
                }
            }
        } else if (message instanceof FaultMessage) {
            FaultMessage fm = (FaultMessage)message;
            discoverGlobalFaultData(fm);
        } else if (message instanceof RecoveryMessage) {
            RecoveryMessage rm = (RecoveryMessage)message;
            assert(m_recoverBeforeTxn == null);
            assert(m_siteRequestingRecovery == null);
            assert(m_recovering == false);
            assert(m_recoveryStage == RecoveryStage.RECOVERED);
            m_recoverBeforeTxn = rm.txnId();
            m_siteRequestingRecovery = rm.sourceSite();
        }
    }

    /** Loads the received ZK snapshot into the embedded server and finishes recovery. */
    private void processZKSnapshot() {
        ByteArrayInputStream bais = new ByteArrayInputStream(m_recoverySnapshot);
        try {
            DataInputStream dis = new DataInputStream(bais);
            BinaryInputArchive bia = new BinaryInputArchive(dis);
            m_server.getZKDatabase().deserializeSnapshot(bia);
            m_server.createSessionTracker();
        } catch (Exception e) {
            org.voltdb.VoltDB.crashLocalVoltDB("Error loading agreement database", false, e);
        }
        m_recoverySnapshot = null;
        m_recoveryStage = RecoveryStage.RECOVERED;
        m_recovering = false;
        m_recoverBeforeTxn = null;
        m_recoveryComplete.countDown();
        m_agreementLog.info("Loaded ZK snapshot");
    }

    /**
     * Serializes and Snappy-compresses the ZK database and ships it (tagged with
     * {@code txnId}) to the joining site, then clears the pending-recovery state.
     */
    private void shipZKDatabaseSnapshot(long joiningAgreementSite, long txnId) throws IOException {
        m_recoveryLog.info("Shipping ZK snapshot from " + CoreUtils.hsIdToString(m_hsId) +
                " to " + CoreUtils.hsIdToString(joiningAgreementSite));
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(baos);
        BinaryOutputArchive boa = new BinaryOutputArchive(dos);
        m_server.getZKDatabase().serializeSnapshot(boa);
        dos.flush();
        byte databaseBytes[] = org.xerial.snappy.Snappy.compress(baos.toByteArray());
        // Metadata layout: 1 byte type tag + 8 byte txn id.
        ByteBuffer metadata = ByteBuffer.allocate(9);
        metadata.put(BINARY_PAYLOAD_SNAPSHOT);
        metadata.putLong(txnId);
        BinaryPayloadMessage bpm = new BinaryPayloadMessage( metadata.array(), databaseBytes);
        m_mailbox.send( joiningAgreementSite, bpm);
        m_siteRequestingRecovery = null;
        m_recoverBeforeTxn = null;
    }

    // Callbacks handed to the mesh arbiter so it can heartbeat and query safety.
    private final MeshAide m_meshAide = new MeshAide() {
        @Override
        public void sendHeartbeats(Set<Long> hsIds) {
            AgreementSite.this.sendHeartbeats(hsIds);
        }
        @Override
        public Long getNewestSafeTransactionForInitiator(Long initiatorId) {
            return m_txnQueue.getNewestSafeTransactionForInitiator(initiatorId);
        }
    };

    /**
     * Runs the mesh arbitration protocol for a reported fault and applies the
     * agreed outcome: disconnect failed hosts, fault affected txns, shrink m_hsIds.
     */
    private void discoverGlobalFaultData(FaultMessage faultMessage) {
        //Keep it simple and don't try to recover on the recovering node.
        if (m_recovering) {
            org.voltdb.VoltDB.crashLocalVoltDB(
                    "Aborting recovery due to a remote node (" +
                    CoreUtils.hsIdToString(faultMessage.failedSite) +
                    ") failure. Retry again.", false, null);
        }

        // Don't try to go through agreement process for a rejoining node.
        // Check out if the current host is rejoining, if so shut itself down.
        m_failedHostsCallback.disconnect(null);

        Set<Long> unknownFaultedHosts = new TreeSet<>();

        // This one line is a biggie. Gets agreement on what the post-fault cluster will be.
        Map<Long, Long> initiatorSafeInitPoint =
            m_meshArbiter.reconfigureOnFault(m_hsIds, faultMessage, unknownFaultedHosts);
        ImmutableSet<Long> failedSites = ImmutableSet.copyOf(initiatorSafeInitPoint.keySet());

        // check if nothing actually happened
        if (initiatorSafeInitPoint.isEmpty() && unknownFaultedHosts.isEmpty()) {
            return;
        }

        ImmutableSet.Builder<Integer> failedHosts = ImmutableSet.builder();
        for (long hsId: failedSites) {
            failedHosts.add(CoreUtils.getHostIdFromHSId(hsId));
        }
        // Remove any hosts associated with failed sites that we don't know
        // about, as could be the case with a failure early in a rejoin
        for (long hsId : unknownFaultedHosts) {
            failedHosts.add(CoreUtils.getHostIdFromHSId(hsId));
        }
        m_failedHostsCallback.disconnect(failedHosts.build());

        // Handle the failed sites after the failedHostsCallback to ensure
        // that partition detection is run first -- as this might release
        // work back to a client waiting on a failure notice. That's unsafe
        // if we partitioned.
        if (!initiatorSafeInitPoint.isEmpty()) {
            handleSiteFaults(failedSites, initiatorSafeInitPoint);
        }
        m_hsIds.removeAll(failedSites);
    }

    /**
     * Purges state for newly failed sites and faults any of their txns that
     * were not globally initiated (ordered after their safe init point).
     */
    private void handleSiteFaults(
            Set<Long> newFailedSiteIds,
            Map<Long, Long> initiatorSafeInitPoint) {
        m_recoveryLog.info("Agreement, handling site faults for newly failed sites " +
                CoreUtils.hsIdCollectionToString(newFailedSiteIds) +
                " initiatorSafeInitPoints " + CoreUtils.hsIdKeyMapToString(initiatorSafeInitPoint));

        // Fix safe transaction scoreboard in transaction queue
        for (Long siteId : newFailedSiteIds) {
            m_safetyState.removeState(siteId);
            m_txnQueue.gotFaultForInitiator(siteId);
            m_server.closeSessions(siteId);
        }

        // Remove affected transactions from RPQ and txnId hash
        // that are not globally initiated
        Iterator<Long> it = m_transactionsById.keySet().iterator();
        while (it.hasNext()) {
            final long tid = it.next();
            OrderableTransaction ts = m_transactionsById.get(tid);
            if (!initiatorSafeInitPoint.containsKey(ts.initiatorHSId)){
                //Not from a failed initiator, no need to inspect and potentially discard
                continue;
            }
            // Fault a transaction that was not globally initiated
            if (ts.txnId > initiatorSafeInitPoint.get(ts.initiatorHSId) &&
                    newFailedSiteIds.contains(ts.initiatorHSId)) {
                m_recoveryLog.info("Faulting non-globally initiated transaction " + ts.txnId);
                it.remove();
                m_txnQueue.faultTransaction(ts);
            }
        }
    }

    /** ZK server callout: routes a request through this site's mailbox. */
    @Override
    public void request(Request r) {
        LocalObjectMessage lom = new LocalObjectMessage(r);
        lom.m_sourceHSId = m_hsId;
        m_mailbox.deliver(lom);
    }

    // Ordered txn wrapping a single ZK request.
    static class AgreementTransactionState extends OrderableTransaction {
        public final Request m_request;
        public AgreementTransactionState(long txnId, long initiatorHSId, Request r) {
            super(txnId, initiatorHSId);
            m_request = r;
        }
    }

    /*
     * Txn state associated with rejoining a node
     */
    private static final class AgreementRejoinTransactionState extends OrderableTransaction {
        private final long m_rejoiningSite;
        // Optional latch released after the join is processed; may be null.
        private final CountDownLatch m_onCompletion;
        public AgreementRejoinTransactionState(
                long txnId,
                long initiatorSiteId,
                long rejoiningSite,
                CountDownLatch onCompletion) {
            super(txnId, initiatorSiteId);
            m_rejoiningSite = rejoiningSite;
            m_onCompletion = onCompletion;
        }
    }

    /**
     * ZK server callout: creates a session asynchronously via the agreement path.
     * Returned semaphore is released once the createSession request is submitted.
     */
    @Override
    public Semaphore createSession(final ServerCnxn cnxn, final byte[] passwd, final int timeout) {
        final Semaphore sem = new Semaphore(0);
        final Runnable r = new Runnable() {
            @Override
            public void run() {
                try {
                    long sessionId = m_idManager.getNextUniqueTransactionId();
                    Random r = new Random(sessionId ^ ZooKeeperServer.superSecret);
                    r.nextBytes(passwd);
                    ByteBuffer to = ByteBuffer.allocate(4);
                    to.putInt(timeout);
                    to.flip();
                    cnxn.setSessionId(sessionId);
                    Request si = new Request(cnxn, sessionId, 0, OpCode.createSession, to, null);
                    try {
                        LocalObjectMessage lom = new LocalObjectMessage(si);
                        lom.m_sourceHSId = m_hsId;
                        processMessage(lom);
                    } catch (Exception e) {
                        org.voltdb.VoltDB.crashLocalVoltDB(
                                "Unexpected exception processing AgreementSite message", true, e);
                    }
                } finally {
                    sem.release();
                }
            }
        };
        LocalObjectMessage lom = new LocalObjectMessage(r);
        lom.m_sourceHSId = m_hsId;
        m_mailbox.deliverFront(lom);
        return sem;
    }

    /** Reports a faulting site by delivering a FaultMessage to this site's mailbox. */
    public void reportFault(long faultingSite) {
        FaultMessage fm = new FaultMessage(m_hsId,faultingSite);
        fm.m_sourceHSId = m_hsId;
        m_mailbox.deliver(fm);
    }

    public void reportFault(FaultMessage fm) {
        fm.m_sourceHSId = m_hsId;
        m_mailbox.deliver(fm);
    }

    /** Blocks until recovery completes; crashes the node on timeout. */
    public void waitForRecovery() throws InterruptedException {
        if (!m_recovering) {
            return;
        }
        // this timeout is totally arbitrary
        // 30s is pretty long in general, but sometimes localcluster may need this long :-(
        if (!m_recoveryComplete.await(30, TimeUnit.SECONDS)) {
            org.voltdb.VoltDB.crashLocalVoltDB("Timed out waiting for the agreement site to recover", false, null);
        }
    }

    /*
     * Construct a ZK transaction that will add the initiator to the cluster
     */
    public CountDownLatch requestJoin(final long joiningSite) throws Exception {
        final CountDownLatch cdl = new CountDownLatch(1);
        final Runnable r = new Runnable() {
            @Override
            public void run() {
                try {
                    final long txnId = m_idManager.getNextUniqueTransactionId();

                    // Broadcast the join request to every other live site.
                    for (long initiatorHSId : m_hsIds) {
                        if (initiatorHSId == m_hsId) continue;
                        JSONObject jsObj = new JSONObject();
                        jsObj.put("txnId", txnId);
                        jsObj.put("initiatorHSId", m_hsId);
                        jsObj.put("joiningHSId", joiningSite);
                        jsObj.put("lastSafeTxnId",
                                m_safetyState.getNewestSafeTxnIdForExecutorBySiteId(initiatorHSId));
                        byte payload[] = jsObj.toString(4).getBytes("UTF-8");
                        ByteBuffer metadata = ByteBuffer.allocate(1);
                        metadata.put(BINARY_PAYLOAD_JOIN_REQUEST);
                        BinaryPayloadMessage bpm = new BinaryPayloadMessage(metadata.array(), payload);
                        m_mailbox.send( initiatorHSId, bpm);
                    }

                    m_txnQueue.noteTransactionRecievedAndReturnLastSeen(m_hsId,
                            txnId,
                            m_safetyState.getNewestGloballySafeTxnId());

                    AgreementRejoinTransactionState arts =
                        new AgreementRejoinTransactionState( txnId, m_hsId, joiningSite, cdl );

                    if (!m_txnQueue.add(arts)) {
                        org.voltdb.VoltDB.crashLocalVoltDB("Shouldn't have failed to add txn", true, null);
                    }
                    m_transactionsById.put(arts.txnId, arts);
                } catch (Throwable e) {
                    org.voltdb.VoltDB.crashLocalVoltDB("Error constructing JSON", false, e);
                }
            }
        };
        LocalObjectMessage lom = new LocalObjectMessage(r);
        lom.m_sourceHSId = m_hsId;
        m_mailbox.deliver(lom);
        return cdl;
    }

    public int getFailedSiteCount() {
        return m_meshArbiter.getFailedSitesCount();
    }
}
package org.voltdb;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.zip.CRC32;

import org.voltdb.client.ConnectionUtil;
import org.voltdb.logging.VoltLogger;
import org.voltdb.messaging.FastSerializer;
import org.voltdb.utils.DBBPool;
import org.voltdb.utils.DBBPool.BBContainer;

/**
 * Snapshot target that serializes table data to a local file. All writes are
 * funneled through a single-threaded executor; the file header carries a CRC
 * and a completed flag that is only set to 1 in close() when no write failed.
 */
public class DefaultSnapshotDataTarget implements SnapshotDataTarget {
    // Test hooks: when set, simulate ENOSPC during header/chunk writes.
    public static volatile boolean m_simulateFullDiskWritingHeader = false;
    public static volatile boolean m_simulateFullDiskWritingChunk = false;

    private final File m_file;
    private final FileChannel m_channel;
    private final FileOutputStream m_fos;
    private static final VoltLogger hostLog = new VoltLogger("HOST");
    // Single thread: serializes all file writes and the periodic fsync task.
    private final ScheduledExecutorService m_es;
    private Runnable m_onCloseHandler = null;

    /*
     * If a write fails then this snapshot is hosed.
     * Set the flag so all writes return immediately. The system still
     * needs to scan the all table to clear the dirty bits
     * so the process continues as if the writes are succeeding.
     * A more efficient failure mode would do the scan but not the
     * extra serialization work.
     */
    private volatile boolean m_writeFailed = false;
    private volatile IOException m_writeException = null;

    private volatile long m_bytesWritten = 0;
    // Force an fsync once this many bytes accumulate without one (256MB).
    private static final long m_maxBytesWrittenBetweenSync = (1024 * 1024) * 256;
    private long m_bytesWrittenSinceLastSync = 0;
    private long m_lastSyncTime = 0;

    /*
     * Accept a single write even though simulating a full disk is enabled;
     */
    private volatile boolean m_acceptOneWrite = false;

    @SuppressWarnings("unused")
    private final String m_tableName;

    /** Convenience constructor that writes a zeroed 4-int version vector. */
    public DefaultSnapshotDataTarget(
            final File file,
            final int hostId,
            final String clusterName,
            final String databaseName,
            final String tableName,
            final int numPartitions,
            final boolean isReplicated,
            final int partitionIds[],
            final VoltTable schemaTable,
            final long createTime) throws IOException {
        this(
                file,
                hostId,
                clusterName,
                databaseName,
                tableName,
                numPartitions,
                isReplicated,
                partitionIds,
                schemaTable,
                createTime,
                new int[] { 0, 0, 0, 0 });
    }

    /**
     * Opens the target file and synchronously writes the snapshot header
     * (CRC placeholder, length, completed flag, version, metadata, schema).
     * Throws if the header write fails so a broken target is never returned.
     */
    public DefaultSnapshotDataTarget(
            final File file,
            final int hostId,
            final String clusterName,
            final String databaseName,
            final String tableName,
            final int numPartitions,
            final boolean isReplicated,
            final int partitionIds[],
            final VoltTable schemaTable,
            final long createTime,
            int version[]
            ) throws IOException {
        String hostname = ConnectionUtil.getHostnameOrAddress();
        m_file = file;
        m_tableName = tableName;
        m_fos = new FileOutputStream(file);
        m_channel = m_fos.getChannel();
        m_es = Executors.newScheduledThreadPool(1, new ThreadFactory() {
            @Override
            public Thread newThread(Runnable r) {
                return new Thread(
                        Thread.currentThread().getThreadGroup(),
                        r,
                        "Snapshot target for " + file.getName() + " host " + hostId,
                        131072);
            }
        });

        final FastSerializer fs = new FastSerializer();
        fs.writeInt(0);//CRC
        fs.writeInt(0);//Header length placeholder
        fs.writeByte(1);//Indicate the snapshot was not completed, set to true for the CRC calculation, false later
        for (int ii = 0; ii < 4; ii++) {
            fs.writeInt(version[ii]);//version
        }
        fs.writeLong(createTime);
        fs.writeInt(hostId);
        fs.writeString(hostname);
        fs.writeString(clusterName);
        fs.writeString(databaseName);
        fs.writeString(tableName.toUpperCase());
        fs.writeBoolean(isReplicated);
        if (!isReplicated) {
            fs.writeArray(partitionIds);
            fs.writeInt(numPartitions);
        }
        final BBContainer container = fs.getBBContainer();
        // Back-patch the header length at offset 4 (length excludes CRC+length fields).
        container.b.position(4);
        container.b.putInt(container.b.remaining() - 4);
        container.b.position(0);

        FastSerializer schemaSerializer = new FastSerializer();
        schemaTable.writeExternal(schemaSerializer);
        final BBContainer schemaContainer = schemaSerializer.getBBContainer();
        schemaContainer.b.limit(schemaContainer.b.limit() - 4);//Don't want the row count
        schemaContainer.b.position(schemaContainer.b.position() + 4);//Don't want total table length

        final CRC32 crc = new CRC32();
        ByteBuffer aggregateBuffer = ByteBuffer.allocate(container.b.remaining() + schemaContainer.b.remaining());
        aggregateBuffer.put(container.b);
        aggregateBuffer.put(schemaContainer.b);
        aggregateBuffer.flip();
        // CRC covers everything after the 4-byte CRC field itself.
        crc.update(aggregateBuffer.array(), 4, aggregateBuffer.capacity() - 4);

        final int crcValue = (int) crc.getValue();
        aggregateBuffer.putInt(crcValue).position(8);
        aggregateBuffer.put((byte)0).position(0);//Haven't actually finished writing file

        if (m_simulateFullDiskWritingHeader) {
            m_writeException = new IOException("Disk full");
            m_writeFailed = true;
            m_fos.close();
            m_es.shutdown();
            throw m_writeException;
        }

        /*
         * Be completely sure the write succeeded. If it didn't
         * the disk is probably full or the path is bunk etc.
         */
        m_acceptOneWrite = true;
        Future<?> writeFuture = write(DBBPool.wrapBB(aggregateBuffer), false);
        try {
            writeFuture.get();
        } catch (InterruptedException e) {
            // NOTE(review): interrupt is swallowed here and the constructor
            // returns a half-initialized target without re-interrupting the
            // thread — presumably callers treat this as a cancelled snapshot;
            // confirm before changing.
            m_fos.close();
            m_es.shutdown();
            return;
        } catch (ExecutionException e) {
            // NOTE(review): rethrows m_writeException set by the write task;
            // assumes the task always records it before failing — verify.
            m_fos.close();
            m_es.shutdown();
            throw m_writeException;
        }
        if (m_writeFailed) {
            m_fos.close();
            m_es.shutdown();
            throw m_writeException;
        }

        // Periodic background fsync when data has been written and at least
        // 10s have elapsed since the last sync.
        m_es.scheduleAtFixedRate(new Runnable() {
            @Override
            public void run() {
                final long now = System.currentTimeMillis();
                if (now - m_lastSyncTime < 10000 || m_bytesWrittenSinceLastSync == 0) {
                    return;
                }
                try {
                    m_fos.getFD().sync();
                } catch (IOException e) {
                    m_writeException = e;
                    hostLog.error("Error while attempting to sync snapshot data for file " + m_file, e);
                    m_writeFailed = true;
                    throw new RuntimeException(e);
                }
                m_lastSyncTime = System.currentTimeMillis();
                m_bytesWrittenSinceLastSync = 0;
            }
        }, 10000, 10, TimeUnit.SECONDS);
    }

    /**
     * Drains pending writes, then patches the "completed" byte at offset 8
     * (1 = success, 0 = a write failed) and closes the file.
     */
    @Override
    public void close() throws IOException, InterruptedException {
        m_es.shutdown();
        m_es.awaitTermination( 1, TimeUnit.DAYS);
        m_channel.force(true);
        m_fos.getFD().sync();
        m_channel.position(8);
        ByteBuffer completed = ByteBuffer.allocate(1);
        if (m_writeFailed) {
            completed.put((byte)0).flip();
        } else {
            completed.put((byte)1).flip();
        }
        m_channel.write(completed);
        m_fos.getFD().sync();
        m_channel.close();
        if (m_onCloseHandler != null) {
            m_onCloseHandler.run();
        }
    }

    @Override
    public int getHeaderSize() {
        return 4;
    }

    /**
     * Queues a chunk write on the executor. Returns null (after discarding the
     * buffer) when a previous write already failed. When prependLength is set,
     * the chunk's length is patched into its first 4 bytes before queuing.
     */
    private Future<?> write(final BBContainer tupleData, final boolean prependLength) {
        if (m_writeFailed) {
            tupleData.discard();
            return null;
        }
        if (prependLength) {
            tupleData.b.putInt(tupleData.b.remaining() - 4);
            tupleData.b.position(0);
        }
        return m_es.submit(new Callable<Object>() {
            @Override
            public Object call() throws Exception {
                try {
                    if (m_acceptOneWrite) {
                        // One-shot pass used for the header even when chunk
                        // failure simulation is enabled.
                        m_acceptOneWrite = false;
                    } else {
                        if (m_simulateFullDiskWritingChunk) {
                            throw new IOException("Disk full");
                        }
                    }
                    while (tupleData.b.hasRemaining()) {
                        final long written = m_channel.write(tupleData.b);
                        m_bytesWritten += written;
                        m_bytesWrittenSinceLastSync += written;
                    }
                    if (m_bytesWrittenSinceLastSync >= m_maxBytesWrittenBetweenSync) {
                        m_fos.getFD().sync();
                        m_bytesWrittenSinceLastSync = 0;
                        m_lastSyncTime = System.currentTimeMillis();
                    }
                } catch (IOException e) {
                    m_writeException = e;
                    hostLog.error("Error while attempting to write snapshot data to file " + m_file, e);
                    m_writeFailed = true;
                    throw e;
                } finally {
                    tupleData.discard();
                }
                return null;
            }
        });
    }

    @Override
    public Future<?> write(final BBContainer tupleData) {
        return write(tupleData, true);
    }

    @Override
    public long getBytesWritten() {
        return m_bytesWritten;
    }

    @Override
    public void setOnCloseHandler(Runnable onClose) {
        m_onCloseHandler = onClose;
    }

    @Override
    public IOException getLastWriteException() {
        return m_writeException;
    }
}
package io.bitsquare.app;

import io.bitsquare.BitsquareModule;
import io.bitsquare.btc.BitcoinModule;
import io.bitsquare.crypto.CryptoModule;
import io.bitsquare.msg.MessageModule;
import io.bitsquare.msg.tomp2p.TomP2PMessageModule;
import io.bitsquare.offer.OfferModule;
import io.bitsquare.offer.tomp2p.TomP2POfferModule;
import io.bitsquare.persistence.Persistence;
import io.bitsquare.settings.Settings;
import io.bitsquare.trade.TradeModule;
import io.bitsquare.user.User;

import com.google.common.base.Preconditions;
import com.google.inject.Injector;
import com.google.inject.name.Names;

import java.util.Properties;

import net.tomp2p.connection.Ports;

/**
 * Configures all non-UI modules necessary to run a Bitsquare application.
 */
public class AppModule extends BitsquareModule {

    /** Property key under which the application name must be supplied. */
    public static final String APP_NAME_KEY = "appName";

    /**
     * @param properties configuration properties; must contain {@link #APP_NAME_KEY}
     */
    public AppModule(Properties properties) {
        super(properties);
    }

    @Override
    protected void configure() {
        bind(User.class).asEagerSingleton();
        bind(Persistence.class).asEagerSingleton();
        bind(Settings.class).asEagerSingleton();

        install(messageModule());
        install(bitcoinModule());
        install(cryptoModule());
        install(tradeModule());
        install(offerModule());

        String appName = properties.getProperty(APP_NAME_KEY);
        Preconditions.checkArgument(appName != null, "App name must be non-null");
        // Use the shared key constant instead of repeating the literal "appName",
        // so the property lookup key and the binding annotation cannot drift apart.
        bindConstant().annotatedWith(Names.named(APP_NAME_KEY)).to(appName);

        // Pick a random free TCP port for the P2P client at injector-creation time.
        int randomPort = new Ports().tcpPort();
        bindConstant().annotatedWith(Names.named("clientPort")).to(randomPort);
    }

    /** Override point: message-layer module (TomP2P by default). */
    protected MessageModule messageModule() {
        return new TomP2PMessageModule(properties);
    }

    /** Override point: Bitcoin/wallet module. */
    protected BitcoinModule bitcoinModule() {
        return new BitcoinModule(properties);
    }

    /** Override point: cryptography module. */
    protected CryptoModule cryptoModule() {
        return new CryptoModule(properties);
    }

    /** Override point: trade workflow module. */
    protected TradeModule tradeModule() {
        return new TradeModule(properties);
    }

    /** Override point: offer-book module (TomP2P by default). */
    protected OfferModule offerModule() {
        return new TomP2POfferModule(properties);
    }

    @Override
    protected void doClose(Injector injector) {
        // Nothing to release at this level; child modules clean up themselves.
    }
}
package org.springframework.cloud.service.relational;

import javax.sql.DataSource;

import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.springframework.cloud.ReflectionUtils;
import org.springframework.cloud.service.PooledServiceConnectorConfig.PoolConfig;
import org.springframework.cloud.service.ServiceConnectorConfig;
import org.springframework.cloud.service.common.MysqlServiceInfo;
import org.springframework.cloud.service.relational.DataSourceConfig.ConnectionConfig;

import java.util.Collections;
import java.util.List;
import java.util.Properties;

import static org.hamcrest.core.IsInstanceOf.instanceOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.when;
import static org.springframework.cloud.service.Util.hasClass;
import static org.springframework.cloud.service.relational.BasicDbcpPooledDataSourceCreator.DBCP2_BASIC_DATASOURCE;
import static org.springframework.cloud.service.relational.BasicDbcpPooledDataSourceCreator.DBCP_BASIC_DATASOURCE;
import static org.springframework.cloud.service.relational.HikariCpPooledDataSourceCreator.HIKARI_DATASOURCE;
import static org.springframework.cloud.service.relational.TomcatDbcpPooledDataSourceCreator.TOMCAT_7_DBCP;
import static org.springframework.cloud.service.relational.TomcatDbcpPooledDataSourceCreator.TOMCAT_8_DBCP;
import static org.springframework.cloud.service.relational.TomcatJdbcPooledDataSourceCreator.TOMCAT_JDBC_DATASOURCE;

/**
 * Verifies that each pooled-{@link DataSource} creator produces the expected pool
 * implementation and applies the configured pool and connection settings, for
 * whichever pool libraries happen to be on the classpath.
 */
public class PooledDataSourceCreatorsTest {

    private static final int MIN_POOL_SIZE = 100;
    private static final int MAX_POOL_SIZE = 200;
    private static final int MAX_WAIT_TIME = 5;

    private static final String CONNECTION_PROPERTIES_STRING = "useUnicode=true;characterEncoding=UTF-8";

    // Expected parsed form of CONNECTION_PROPERTIES_STRING. Built in a static
    // initializer instead of double-brace initialization, which would create an
    // anonymous Properties subclass per the usual anti-pattern.
    private static final Properties CONNECTION_PROPERTIES = new Properties();
    static {
        CONNECTION_PROPERTIES.setProperty("useUnicode", "true");
        CONNECTION_PROPERTIES.setProperty("characterEncoding", "UTF-8");
    }

    @Mock
    private MysqlServiceInfo mockMysqlServiceInfo;

    // Just to grab driver class name and validation query string
    private MysqlDataSourceCreator mysqlDataSourceCreator = new MysqlDataSourceCreator();

    @Before
    public void setup() {
        MockitoAnnotations.initMocks(this);
        when(mockMysqlServiceInfo.getJdbcUrl()).thenReturn("jdbc:mysql://myuser:mypassword@10.20.30.40:3306/database-123");
    }

    /** Default (no named creator): Tomcat JDBC pool is selected. */
    @Test
    public void pooledDataSourceCreationDefault() throws Exception {
        PoolConfig poolConfig = new PoolConfig(MIN_POOL_SIZE, MAX_POOL_SIZE, MAX_WAIT_TIME);
        ConnectionConfig connectionConfig = new ConnectionConfig(CONNECTION_PROPERTIES_STRING);
        DataSourceConfig config = new DataSourceConfig(poolConfig, connectionConfig);

        DataSource ds = createMysqlDataSource(config);
        assertTomcatJdbcDataSource(ds);
    }

    /** Creator selectable by short name or by class simple name. */
    @Test
    public void pooledDataSourceCreationDbcp() throws Exception {
        DataSource ds = createMysqlDataSourceWithPooledName("BasicDbcp");
        assertBasicDbcpDataSource(ds);

        ds = createMysqlDataSourceWithPooledName(BasicDbcpPooledDataSourceCreator.class.getSimpleName());
        assertBasicDbcpDataSource(ds);
    }

    @Test
    public void pooledDataSourceCreationTomcatDbcp() throws Exception {
        DataSource ds = createMysqlDataSourceWithPooledName("TomcatDbcp");
        assertTomcatDbcpDataSource(ds);

        ds = createMysqlDataSourceWithPooledName(TomcatDbcpPooledDataSourceCreator.class.getSimpleName());
        assertTomcatDbcpDataSource(ds);
    }

    @Test
    public void pooledDataSourceCreationTomcatJdbc() throws Exception {
        DataSource ds = createMysqlDataSourceWithPooledName("TomcatJdbc");
        assertTomcatJdbcDataSource(ds);

        ds = createMysqlDataSourceWithPooledName(TomcatJdbcPooledDataSourceCreator.class.getSimpleName());
        assertTomcatJdbcDataSource(ds);
    }

    @Test
    public void pooledDataSourceCreationHikariCP() throws Exception {
        DataSource ds = createMysqlDataSourceWithPooledName("HikariCp");
        assertHikariDataSource(ds);

        ds = createMysqlDataSourceWithPooledName(HikariCpPooledDataSourceCreator.class.getSimpleName());
        assertHikariDataSource(ds);
    }

    /** An unknown creator name falls back to a plain (non-pooled) driver data source. */
    @Test
    public void pooledDataSourceCreationInvalid() throws Exception {
        DataSource ds = createMysqlDataSourceWithPooledName("Dummy");
        assertThat(ds, instanceOf(org.springframework.jdbc.datasource.SimpleDriverDataSource.class));
    }

    /** Builds a data source restricted to the single named pooled creator. */
    private DataSource createMysqlDataSourceWithPooledName(String pooledDataSourceName) {
        List<String> dataSourceNames = Collections.singletonList(pooledDataSourceName);
        PoolConfig poolConfig = new PoolConfig(MIN_POOL_SIZE, MAX_POOL_SIZE, MAX_WAIT_TIME);
        ConnectionConfig connectionConfig = new ConnectionConfig(CONNECTION_PROPERTIES_STRING);
        DataSourceConfig config = new DataSourceConfig(poolConfig, connectionConfig, dataSourceNames);
        return createMysqlDataSource(config);
    }

    private DataSource createMysqlDataSource(ServiceConnectorConfig config) {
        return mysqlDataSourceCreator.create(mockMysqlServiceInfo, config);
    }

    /** DBCP2 takes precedence over DBCP1 when both are on the classpath. */
    private void assertBasicDbcpDataSource(DataSource ds) throws ClassNotFoundException {
        assertTrue(hasClass(DBCP2_BASIC_DATASOURCE) || hasClass(DBCP_BASIC_DATASOURCE));
        if (hasClass(DBCP2_BASIC_DATASOURCE)) {
            assertThat(ds, instanceOf(Class.forName(DBCP2_BASIC_DATASOURCE)));
            assertEquals(MIN_POOL_SIZE, getIntValue(ds, "minIdle"));
            assertEquals(MAX_POOL_SIZE, getIntValue(ds, "maxTotal"));
            assertEquals(MAX_WAIT_TIME, getIntValue(ds, "maxWaitMillis"));
            assertEquals(CONNECTION_PROPERTIES, getPropertiesValue(ds, "connectionProperties"));
        }
        if (hasClass(DBCP_BASIC_DATASOURCE) && !hasClass(DBCP2_BASIC_DATASOURCE)) {
            assertThat(ds, instanceOf(Class.forName(DBCP_BASIC_DATASOURCE)));
            assertEquals(MIN_POOL_SIZE, getIntValue(ds, "minIdle"));
            assertEquals(MAX_POOL_SIZE, getIntValue(ds, "maxActive"));
            assertEquals(MAX_WAIT_TIME, getIntValue(ds, "maxWait"));
            assertEquals(CONNECTION_PROPERTIES, getPropertiesValue(ds, "connectionProperties"));
        }
    }

    private void assertTomcatDbcpDataSource(DataSource ds) throws ClassNotFoundException {
        assertTrue(hasClass(TOMCAT_7_DBCP) || hasClass(TOMCAT_8_DBCP));
        if (hasClass(TOMCAT_7_DBCP)) {
            assertThat(ds, instanceOf(Class.forName(TOMCAT_7_DBCP)));
            assertEquals(MIN_POOL_SIZE, getIntValue(ds, "minIdle"));
            assertEquals(MAX_WAIT_TIME, getIntValue(ds, "maxWait"));
            assertEquals(CONNECTION_PROPERTIES, getPropertiesValue(ds, "connectionProperties"));
        }
        if (hasClass(TOMCAT_8_DBCP)) {
            assertThat(ds, instanceOf(Class.forName(TOMCAT_8_DBCP)));
            assertEquals(MIN_POOL_SIZE, getIntValue(ds, "minIdle"));
            assertEquals(MAX_POOL_SIZE, getIntValue(ds, "maxTotal"));
            assertEquals(MAX_WAIT_TIME, getIntValue(ds, "maxWaitMillis"));
            assertEquals(CONNECTION_PROPERTIES, getPropertiesValue(ds, "connectionProperties"));
        }
    }

    private void assertTomcatJdbcDataSource(DataSource ds) throws ClassNotFoundException {
        assertThat(ds, instanceOf(Class.forName(TOMCAT_JDBC_DATASOURCE)));
        assertEquals(MIN_POOL_SIZE, getIntValue(ds, "minIdle"));
        assertEquals(MAX_WAIT_TIME, getIntValue(ds, "maxWait"));
        // the results of setConnectionProperties are reflected by getDbProperties, not getConnectionProperties
        assertEquals(CONNECTION_PROPERTIES, getPropertiesValue(ds, "dbProperties"));
    }

    private void assertHikariDataSource(DataSource ds) throws ClassNotFoundException {
        assertThat(ds, instanceOf(Class.forName(HIKARI_DATASOURCE)));
        assertEquals(MIN_POOL_SIZE, getIntValue(ds, "minimumIdle"));
        assertEquals(MAX_POOL_SIZE, getIntValue(ds, "maximumPoolSize"));
    }

    /** Reads a bean field reflectively and parses its string form as an int. */
    private int getIntValue(Object target, String fieldName) {
        return Integer.parseInt(getStringValue(target, fieldName));
    }

    private String getStringValue(Object target, String fieldName) {
        return getValue(target, fieldName).toString();
    }

    private Properties getPropertiesValue(Object target, String fieldName) {
        return (Properties) getValue(target, fieldName);
    }

    /** Reflective field read; a null value is treated as a bad field name. */
    private Object getValue(Object target, String fieldName) {
        Object value = ReflectionUtils.getValue(target, fieldName);
        if (value == null) {
            throw new IllegalArgumentException("Bad field name " + fieldName +
                    " for target object " + target);
        }
        return value;
    }
}
package com.squareup.calendar; import android.content.Context; import android.util.AttributeSet; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.BaseAdapter; import android.widget.ListView; import android.widget.Toast; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.List; import static java.util.Calendar.DATE; import static java.util.Calendar.DAY_OF_MONTH; import static java.util.Calendar.DAY_OF_WEEK; import static java.util.Calendar.HOUR_OF_DAY; import static java.util.Calendar.MILLISECOND; import static java.util.Calendar.MINUTE; import static java.util.Calendar.MONTH; import static java.util.Calendar.SECOND; import static java.util.Calendar.SUNDAY; import static java.util.Calendar.YEAR; /** * Android component to allow picking a date from a calendar view (a list of months). Must be * initialized after inflation with {@link #init(java.util.Date, java.util.Date, java.util.Date)}. * The currently selected date can be retrieved with {@link #getSelectedDate()} or {@link * #getSelectedDateMillis()}. 
*/
public class CalendarPickerView extends ListView {
  private final CalendarPickerView.MonthAdapter adapter;
  // Formats resolved from resources in the constructor.
  private final DateFormat monthNameFormat;
  private final DateFormat weekdayNameFormat;
  private final DateFormat fullDateFormat;
  // One descriptor per visible month, and a parallel list of
  // month -> weeks -> day-cells used by MonthView rendering.
  final List<MonthDescriptor> months = new ArrayList<MonthDescriptor>();
  final List<List<List<MonthCellDescriptor>>> cells =
      new ArrayList<List<List<MonthCellDescriptor>>>();
  // Cell currently rendered as selected; reassigned on every click.
  private MonthCellDescriptor selectedCell;
  final Calendar today = Calendar.getInstance();
  private final Calendar selectedCal = Calendar.getInstance();
  private final Calendar minCal = Calendar.getInstance();
  private final Calendar maxCal = Calendar.getInstance();
  // Scratch calendar used to walk month-by-month in init().
  private final Calendar monthCounter = Calendar.getInstance();
  private final MonthView.Listener listener = new CellClickedListener();

  public CalendarPickerView(Context context, AttributeSet attrs) {
    super(context, attrs);
    adapter = new MonthAdapter();
    setDivider(null);
    setDividerHeight(0);
    setAdapter(adapter);
    final int bg = getResources().getColor(R.color.calendar_bg);
    setBackgroundColor(bg);
    setCacheColorHint(bg);
    monthNameFormat = new SimpleDateFormat(context.getString(R.string.month_name_format));
    weekdayNameFormat = new SimpleDateFormat(context.getString(R.string.day_name_format));
    fullDateFormat = DateFormat.getDateInstance(DateFormat.MEDIUM);
  }

  /**
   * All date parameters must be non-null and their {@link java.util.Date#getTime()} must not
   * return 0. Time of day will be ignored. For instance, if you pass in {@code minDate} as
   * 11/16/2012 5:15pm and {@code maxDate} as 11/16/2013 4:30am, 11/16/2012 will be the first
   * selectable date and 11/15/2013 will be the last selectable date ({@code maxDate} is
   * exclusive).
   *
   * @param selectedDate Initially selected date. Must be between {@code minDate} and
   * {@code maxDate}.
   * @param minDate Earliest selectable date, inclusive. Must be earlier than {@code maxDate}.
   * @param maxDate Latest selectable date, exclusive. Must be later than {@code minDate}.
   */
  public void init(Date selectedDate, Date minDate, Date maxDate) {
    if (selectedDate == null || minDate == null || maxDate == null) {
      throw new IllegalArgumentException(
          "All dates must be non-null. " + dbg(selectedDate, minDate, maxDate));
    }
    if (selectedDate.getTime() == 0 || minDate.getTime() == 0 || maxDate.getTime() == 0) {
      throw new IllegalArgumentException(
          "All dates must be non-zero. " + dbg(selectedDate, minDate, maxDate));
    }
    if (minDate.after(maxDate)) {
      throw new IllegalArgumentException(
          "Min date must be before max date. " + dbg(selectedDate, minDate, maxDate));
    }
    if (selectedDate.before(minDate) || selectedDate.after(maxDate)) {
      throw new IllegalArgumentException(
          "selectedDate must be between minDate and maxDate. "
              + dbg(selectedDate, minDate, maxDate));
    }
    // Clear previous state.
    cells.clear();
    months.clear();
    // Sanitize input: clear out the hours/minutes/seconds/millis.
    selectedCal.setTime(selectedDate);
    minCal.setTime(minDate);
    maxCal.setTime(maxDate);
    setMidnight(selectedCal);
    setMidnight(minCal);
    setMidnight(maxCal);
    // Now iterate between minCal and maxCal and build up our list of months to show.
    monthCounter.setTime(minCal.getTime());
    final int maxMonth = maxCal.get(MONTH);
    final int maxYear = maxCal.get(YEAR);
    // NOTE(review): loop condition is order-sensitive — it advances through
    // months of the min year (YEAR < maxYear), then through months of the max
    // year up to and including maxMonth, and the trailing clause caps the walk
    // at maxYear so it cannot run into the following year.
    while ((monthCounter.get(MONTH) <= maxMonth // Up to, including the month.
        || monthCounter.get(YEAR) < maxYear) // Up to the year.
        && monthCounter.get(YEAR) < maxYear + 1) { // But not > next yr.
      MonthDescriptor month = new MonthDescriptor(monthCounter.get(MONTH), monthCounter.get(YEAR),
          monthNameFormat.format(monthCounter.getTime()));
      cells.add(getMonthCells(month, monthCounter, selectedCal));
      Logr.d("Adding month " + month);
      months.add(month);
      monthCounter.add(MONTH, 1);
    }
    adapter.notifyDataSetChanged();
  }

  /** Epoch millis of the currently selected date (midnight, device time zone). */
  public long getSelectedDateMillis() {
    return getSelectedDate().getTimeInMillis();
  }

  /** The live selection calendar — mutated in place when the user picks a date. */
  public Calendar getSelectedDate() {
    return selectedCal;
  }

  /** Returns a string summarizing what the client sent us for init() params. */
  private static String dbg(Date startDate, Date minDate, Date maxDate) {
    return "startDate: " + startDate + "\nminDate: " + minDate + "\nmaxDate: " + maxDate;
  }

  /** Clears out the hours/minutes/seconds/millis of a Calendar. */
  private static void setMidnight(Calendar cal) {
    cal.set(HOUR_OF_DAY, 0);
    cal.set(MINUTE, 0);
    cal.set(SECOND, 0);
    cal.set(MILLISECOND, 0);
  }

  /** Handles day-cell taps: rejects out-of-range dates, otherwise moves the selection. */
  private class CellClickedListener implements MonthView.Listener {
    @Override public void handleClick(MonthCellDescriptor cell) {
      if (!betweenDates(cell.getDate(), minCal, maxCal)) {
        String errMessage =
            getResources().getString(R.string.invalid_date, fullDateFormat.format(minCal.getTime()),
                fullDateFormat.format(maxCal.getTime()));
        Toast.makeText(getContext(), errMessage, Toast.LENGTH_SHORT).show();
      } else {
        // De-select the currently-selected cell.
        selectedCell.setSelected(false);
        // Select the new cell.
        selectedCell = cell;
        selectedCell.setSelected(true);
        // Track the currently selected date value.
        selectedCal.setTime(cell.getDate());
        // Update the adapter.
        adapter.notifyDataSetChanged();
      }
    }
  }

  /** List adapter: one MonthView row per month descriptor. */
  private class MonthAdapter extends BaseAdapter {
    private final LayoutInflater inflater;

    private MonthAdapter() {
      inflater = LayoutInflater.from(getContext());
    }

    @Override public boolean isEnabled(int position) {
      // Disable selectability: each cell will handle that itself.
      return false;
    }

    @Override public int getCount() {
      return months.size();
    }

    @Override public Object getItem(int position) {
      return months.get(position);
    }

    @Override public long getItemId(int position) {
      return position;
    }

    @Override public View getView(int position, View convertView, ViewGroup parent) {
      MonthView monthView = (MonthView) convertView;
      if (monthView == null) {
        monthView = MonthView.create(parent, inflater, weekdayNameFormat, listener, today);
      }
      monthView.init(months.get(position), cells.get(position));
      return monthView;
    }
  }

  /**
   * Builds the week rows (7 cells each) for one month. Starts on the Sunday on
   * or before the 1st of the month, so leading/trailing cells may belong to
   * adjacent months (marked !isCurrentMonth). Also captures the selected cell.
   */
  List<List<MonthCellDescriptor>> getMonthCells(MonthDescriptor month, Calendar startCal,
      Calendar selectedDate) {
    Calendar cal = Calendar.getInstance();
    cal.setTime(startCal.getTime());
    List<List<MonthCellDescriptor>> cells = new ArrayList<List<MonthCellDescriptor>>();
    cal.set(DAY_OF_MONTH, 1);
    int firstDayOfWeek = cal.get(DAY_OF_WEEK);
    // Rewind to the Sunday that starts the first week row (SUNDAY == 1).
    cal.add(DATE, SUNDAY - firstDayOfWeek);
    // NOTE(review): order-sensitive stop condition — keeps emitting week rows
    // while still inside (or before) the target month, and the YEAR clauses
    // prevent a December month from looping into January of the next year.
    while ((cal.get(MONTH) < month.getMonth() + 1 || cal.get(YEAR) < month.getYear()) //
        && cal.get(YEAR) <= month.getYear()) {
      Logr.d("Building week row starting at " + cal.getTime());
      List<MonthCellDescriptor> weekCells = new ArrayList<MonthCellDescriptor>();
      cells.add(weekCells);
      for (int c = 0; c < 7; c++) {
        Date date = cal.getTime();
        boolean isCurrentMonth = cal.get(MONTH) == month.getMonth();
        boolean isSelected = isCurrentMonth && sameDate(cal, selectedDate);
        boolean isSelectable = isCurrentMonth && betweenDates(cal, minCal, maxCal);
        boolean isToday = sameDate(cal, today);
        int value = cal.get(DAY_OF_MONTH);
        MonthCellDescriptor cell =
            new MonthCellDescriptor(date, isCurrentMonth, isSelectable, isSelected, isToday, value);
        if (isSelected) {
          selectedCell = cell;
        }
        weekCells.add(cell);
        cal.add(DATE, 1);
      }
    }
    return cells;
  }

  /** True when both calendars fall on the same calendar day (ignores time). */
  private static boolean sameDate(Calendar cal, Calendar selectedDate) {
    return cal.get(MONTH) == selectedDate.get(MONTH)
        && cal.get(YEAR) == selectedDate.get(YEAR)
        && cal.get(DAY_OF_MONTH) == selectedDate.get(DAY_OF_MONTH);
  }

  private static boolean betweenDates(Calendar cal, Calendar minCal, Calendar maxCal) {
    final Date date = cal.getTime();
    return betweenDates(date, minCal, maxCal);
  }

  /** min-inclusive / max-exclusive range check on exact Date values. */
  static boolean betweenDates(Date date, Calendar minCal, Calendar maxCal) {
    final Date min = minCal.getTime();
    return (date.equals(min) || date.after(min)) // >= minCal
        && date.before(maxCal.getTime()); // && < maxCal
  }
}
package org.blendee.sql;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.locks.Lock;
import java.util.function.Function;
import java.util.function.Supplier;

/**
 * Records and replays SQL-producing lambdas: the first invocation of a given
 * lambda class builds its SQL once (recording {@link Placeholder} positions),
 * and later invocations reuse the cached result with fresh placeholder values.
 * NOTE(review): the original Javadoc was garbled (non-ASCII text stripped from
 * this copy); these descriptions are reconstructed from the code — confirm
 * against the upstream Blendee documentation.
 */
public abstract class Recorder {

	// Cache keyed by the lambda's synthetic class: one Reproducer per call site.
	private final Map<Class<?>, Reproducer> executorCache = new HashMap<>();

	// Two-level cache for the decision-based play(): lambda class -> decision
	// result -> Reproducer.
	private final Map<Class<?>, Map<?, Reproducer>> executorMapCache = new HashMap<>();

	private static final Recorder syncRecorder = new SyncRecorder();

	/**
	 * Returns the shared, lock-synchronized {@link Recorder} instance.
	 * @return the synchronized singleton {@link Recorder}
	 */
	public static Recorder instance() {
		return syncRecorder;
	}

	/**
	 * Creates a new {@link Recorder} backed by an async-oriented lock strategy.
	 * @return a fresh {@link Recorder} instance
	 */
	public static Recorder newAsyncInstance() {
		return new AsyncRecorder();
	}

	/**
	 * Replays the {@link Reproducible} produced by {@code supplier}, caching its
	 * built SQL on first use. {@code playbackPlaceHolderValues} are substituted
	 * for the {@link Placeholder} positions recorded during that first build;
	 * when empty, the cached reproduction is returned as-is.
	 * @param supplier builds the {@link Reproducible} (invoked only on cache miss)
	 * @param playbackPlaceHolderValues values for {@link Placeholder} slots
	 * @param <E> concrete {@link Reproducible} type
	 * @return the reproduced {@link Reproducible}
	 */
	@SuppressWarnings("unchecked")
	public <E extends Reproducible<E>> E play(Supplier<E> supplier, Object... playbackPlaceHolderValues) {
		if (playbackPlaceHolderValues.length == 0)
			return (E) prepare(supplier, lock()).reproduce();

		return (E) prepare(supplier, lock()).reproduce(playbackPlaceHolderValues);
	}

	/**
	 * Decision-keyed variant of {@link #play(Supplier, Object...)}: the result of
	 * {@code decision} selects which cached reproduction to use, so one call site
	 * can cache several variants. The decision result is used as a {@link HashMap}
	 * key and must therefore implement equals/hashCode consistently.
	 * @param decision supplies the cache key for this invocation
	 * @param supplier builds the {@link Reproducible} for a given decision result
	 * @param playbackPlaceHolderValuesSupplier derives placeholder values from the decision result
	 * @param <R> decision result type
	 * @param <E> concrete {@link Reproducible} type
	 * @return the reproduced {@link Reproducible}
	 */
	public <R, E extends Reproducible<E>> E play(
		Supplier<R> decision,
		Function<R, E> supplier,
		Function<R, Object[]> playbackPlaceHolderValuesSupplier) {
		return play(decision, supplier, playbackPlaceHolderValuesSupplier, lock());
	}

	/**
	 * Discards all cached SQL reproductions.
	 */
	public void clearCache() {
		Lock lock = lock();
		lock.lock();
		try {
			executorCache.clear();
			executorMapCache.clear();
		} finally {
			lock.unlock();
		}
	}

	/**
	 * @return the {@link Lock} guarding this recorder's caches
	 */
	protected abstract Lock lock();

	// Returns the cached Reproducer for this lambda class, building and
	// recording it under the lock on first use.
	private <E extends Reproducible<E>> Reproducer prepare(Supplier<E> supplier, Lock lock) {
		Class<?> lambdaClass = supplier.getClass();

		Reproducer reproducer = null;
		lock.lock();
		try {
			if ((reproducer = executorCache.get(lambdaClass)) == null) {
				E reproducible = supplier.get();
				try {
					Placeholder.start();
					// Record placeholder positions while the SQL is built for the first time.
					reproducer = new Reproducer(reproducible.reproduce());
				} finally {
					Placeholder.remove();
				}

				executorCache.put(lambdaClass, reproducer);
			}
		} finally {
			lock.unlock();
		}

		return reproducer;
	}

	// Decision-keyed counterpart of prepare(): resolves (lambda class, decision
	// result) to a Reproducer, building it on first use, then reproduces with
	// the supplied values.
	@SuppressWarnings("unchecked")
	private <R, E extends Reproducible<E>> E play(
		Supplier<R> decision,
		Function<R, E> supplier,
		Function<R, Object[]> playbackPlaceHolderValuesSupplier,
		Lock lock) {
		Class<?> lambdaClass = supplier.getClass();

		R result = decision.get();

		Object[] values = playbackPlaceHolderValuesSupplier.apply(result);

		Reproducer reproducer = null;
		lock.lock();
		try {
			Map<R, Reproducer> map = (Map<R, Reproducer>) executorMapCache.get(lambdaClass);
			if (map == null) {
				map = new HashMap<>();
				executorMapCache.put(lambdaClass, map);
			}

			reproducer = map.get(result);
			if (reproducer == null) {
				E reproducible = supplier.apply(result);
				try {
					Placeholder.start();
					// Record placeholder positions while the SQL is built for the first time.
					reproducer = new Reproducer(reproducible.reproduce());
				} finally {
					Placeholder.remove();
				}

				map.put(result, reproducer);
			}
		} finally {
			lock.unlock();
		}

		return (E) reproducer.reproduce(values);
	}

	// Snapshot of a recorded reproduction: the built Reproducible, the binder
	// values captured at record time, and the (0-based) positions of the
	// Placeholder slots among those values.
	private static class Reproducer {

		private final Reproducible<?> reproducible;

		private final Object[] values;

		private final int[] placeholderIndexes;

		private Reproducer(Reproducible<?> reproducible) {
			this.reproducible = reproducible;

			List<Integer> indexes = Placeholder.getIndexes();

			Binder[] binders = reproducible.currentBinders().clone();
			values = new Object[binders.length];
			for (int i = 0; i < binders.length; i++) {
				values[i] = binders[i].getValue();
			}

			placeholderIndexes = new int[indexes.size()];
			for (int i = 0; i < placeholderIndexes.length; i++) {
				// Placeholder indexes are 1-based; convert to array offsets.
				placeholderIndexes[i] = indexes.get(i) - 1;
			}
		}

		// Reproduce with the recorded values, overwriting only the placeholder
		// slots with the caller-supplied new values.
		private Reproducible<?> reproduce(Object[] newValues) {
			if (newValues.length != placeholderIndexes.length)
				throw new IllegalStateException(" " + placeholderIndexes.length + " ");

			Object[] clone = values.clone();
			for (int i = 0; i < newValues.length; i++) {
				clone[placeholderIndexes[i]] = newValues[i];
			}

			return reproducible.reproduce(clone);
		}

		private Reproducible<?> reproduce() {
			return reproducible.reproduce();
		}
	}
}
package gov.nih.nci.common.util;

import java.io.FileNotFoundException;
import java.io.InputStream;
import java.util.Properties;

import org.apache.log4j.Logger;

/**
 * Static holder for CSM security settings, read once (at class load) from
 * {@code CORESystem.properties} on the context classpath. Falls back to safe
 * defaults when the file is missing or individual values are unparsable.
 */
public class SecurityConfiguration {

    private static Logger log = Logger.getLogger(SecurityConfiguration.class.getName());

    // Default session timeout value (60000 — presumably milliseconds; TODO confirm unit).
    private static final long DEFAULT_SESSION_TIMEOUT = 60000L;
    // Default security level when the property is absent or invalid.
    private static final int DEFAULT_SECURITY_LEVEL = 0;

    private static String applicationName;
    private static Integer securityLevel;
    private static Long securitySessionTimeout;
    private static boolean authenticationDisabled;

    static {
        InputStream in = null;
        try {
            Properties properties = new Properties();
            in = Thread.currentThread().getContextClassLoader()
                    .getResourceAsStream("CORESystem.properties");
            if (in == null) {
                // getResourceAsStream returns null (not an exception) for a missing
                // resource; previously this surfaced as an uninformative NPE.
                throw new FileNotFoundException(
                        "CORESystem.properties not found on the context classpath");
            }
            properties.load(in);

            applicationName = properties.getProperty("CSM_APPLICATION_NAME");
            securityLevel = parseIntOrDefault(
                    properties.getProperty("CSM_DEFAULT_SECURITY_LEVEL"), DEFAULT_SECURITY_LEVEL);
            securitySessionTimeout = parseLongOrDefault(
                    properties.getProperty("CSM_DEFAULT_SESSION_TIMEOUT"), DEFAULT_SESSION_TIMEOUT);
            // Authentication is disabled only when the property is literally "yes"
            // (case-insensitive); equalsIgnoreCase handles a null argument safely.
            authenticationDisabled = "yes".equalsIgnoreCase(
                    properties.getProperty("CSM_AUTHENTICATION_DISABLED"));
        } catch (Exception e) {
            // Log the cause — the original log call dropped the exception entirely.
            log.error("Error while reading the CORESystem.properties file.", e);
            securitySessionTimeout = Long.valueOf(DEFAULT_SESSION_TIMEOUT);
            applicationName = "";
            securityLevel = Integer.valueOf(DEFAULT_SECURITY_LEVEL);
            authenticationDisabled = false;
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (Exception ignored) {
                    // Best-effort close of the properties stream; nothing useful to do.
                }
            }
        }
    }

    /** Parses {@code value} as an int, returning {@code fallback} when null or invalid. */
    private static Integer parseIntOrDefault(String value, int fallback) {
        if (value == null) {
            return Integer.valueOf(fallback);
        }
        try {
            return Integer.valueOf(value);
        } catch (NumberFormatException e) {
            return Integer.valueOf(fallback);
        }
    }

    /** Parses {@code value} as a long, returning {@code fallback} when null or invalid. */
    private static Long parseLongOrDefault(String value, long fallback) {
        if (value == null) {
            return Long.valueOf(fallback);
        }
        try {
            return Long.valueOf(value);
        } catch (NumberFormatException e) {
            return Long.valueOf(fallback);
        }
    }

    /** Non-instantiable: all state is static. */
    private SecurityConfiguration() {
    }

    public static String getApplicationName() {
        return applicationName;
    }

    public static Integer getSecurityLevel() {
        return securityLevel;
    }

    public static Long getSecuritySessionTimeout() {
        return securitySessionTimeout;
    }

    public static boolean isAuthenticationDisabled() {
        return authenticationDisabled;
    }
}
package org.exist.backup;

import org.exist.Namespaces;
import org.exist.dom.DocumentTypeImpl;
import org.exist.security.SecurityManager;
import org.exist.security.User;
import org.exist.storage.DBBroker;
import org.exist.util.EXistInputSource;
import org.exist.xmldb.CollectionImpl;
import org.exist.xmldb.CollectionManagementServiceImpl;
import org.exist.xmldb.EXistResource;
import org.exist.xmldb.UserManagementService;
import org.exist.xmldb.XmldbURI;
import org.exist.xquery.XPathException;
import org.exist.xquery.util.URIUtils;
import org.exist.xquery.value.DateTimeValue;
import org.w3c.dom.DocumentType;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xmldb.api.DatabaseManager;
import org.xmldb.api.modules.CollectionManagementService;
import org.xmldb.api.base.Collection;
import org.xmldb.api.base.Resource;
import org.xmldb.api.base.XMLDBException;

import javax.swing.*;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.Date;
import java.util.Observable;
import java.util.Stack;
import java.util.Properties;

/**
 * Restore.java
 *
 * SAX-driven restore of an eXist-db backup into a running database instance.
 * The backup descriptor ({@code __contents__.xml}) is parsed with this class
 * acting as the {@link org.xml.sax.ContentHandler}; each {@code collection},
 * {@code subcollection}, {@code resource} and {@code deleted} element triggers
 * the corresponding XML:DB operation against the target database.
 *
 * @author Wolfgang Meier
 */
public class Restore extends DefaultHandler {

    // Descriptor of the backup part currently being parsed. Also reused by the
    // constructor as its loop variable while chaining incremental backups.
    private BackupDescriptor contents;
    // XML:DB base URI of the target database instance.
    private String uri;
    // Credentials used for all XML:DB calls. pass is replaced when
    // setAdminCredentials() changes the admin password.
    private String username;
    private String pass;
    // SAX reader that feeds this handler.
    private XMLReader reader;
    // Collection currently being restored into; updated by startElement().
    private CollectionImpl current;
    // Backup descriptors still to be processed. The "system" sub-backup and
    // incremental parts are pushed so they are restored in the right order.
    private Stack stack = new Stack();
    // Progress dialog; null when restoring without a GUI.
    private RestoreDialog dialog = null;
    // Backup format version read from the <collection version="..."> attribute; 0 if absent.
    private int version=0;
    // Receives progress and warning callbacks; defaults to DefaultListener.
    private RestoreListener listener;
    // From this backup version on, names in the descriptor are already valid URIs
    // and need no extra encoding.
    private static final int strictUriVersion = 1;

    /**
     * Constructor for Restore. Builds the chain of backup descriptors to
     * process: the given backup itself, its "system" sub-backup (pushed last so
     * it is popped and restored first, creating users before anything else),
     * and - for incremental backups - every earlier part referenced through the
     * "previous" descriptor property.
     *
     * @param user user to connect to the database with
     * @param pass that user's password
     * @param newAdminPass if non-null, the admin password is changed to this value before restoring
     * @param contents a backup directory, a descriptor file, or a .zip/.ZIP archive
     * @param uri XML:DB URI of the database instance to restore into
     * @throws XMLDBException
     * @throws URISyntaxException
     */
    public Restore(String user, String pass, String newAdminPass, File contents, String uri)
            throws ParserConfigurationException, SAXException, XMLDBException, URISyntaxException {
        this.username = user;
        this.pass = pass;
        this.uri = uri;
        this.listener = new DefaultListener();

        if (newAdminPass != null)
            setAdminCredentials(newAdminPass);

        // Namespace-aware, non-validating parser; this object is the handler.
        SAXParserFactory saxFactory = SAXParserFactory.newInstance();
        saxFactory.setNamespaceAware(true);
        saxFactory.setValidating(false);
        SAXParser sax = saxFactory.newSAXParser();
        reader = sax.getXMLReader();
        reader.setContentHandler(this);

        // Walk the chain of (possibly incremental) backups; each iteration pushes
        // one backup part onto the stack and follows its "previous" link.
        do {
            BackupDescriptor bd=null;
            Properties properties = null;
            try {
                if(contents.isDirectory()) {
                    bd=new FileSystemBackupDescriptor(new File(contents,BackupDescriptor.COLLECTION_DESCRIPTOR));
                } else if(contents.getName().endsWith(".zip") || contents.getName().endsWith(".ZIP")) {
                    bd=new ZipArchiveBackupDescriptor(contents);
                } else {
                    // Assume the descriptor file itself was passed.
                    bd=new FileSystemBackupDescriptor(contents);
                }
                properties = bd.getProperties();
            } catch(Exception e) {
                e.printStackTrace();
                throw new SAXException("Unable to create backup descriptor object from "+contents,e);
            }
            stack.push(bd);

            // check if the system collection is in the backup. We have to process
            // this first to create users.
            //TODO : find a way to make a correspondence with DBBroker's named constants
            BackupDescriptor sysbd=bd.getChildBackupDescriptor("system");
            if (sysbd!=null) {
                // Pushed after bd, so it is popped (and restored) first.
                stack.push(sysbd);
            }

            contents = null;
            // For an incremental backup, continue the loop with the previous part.
            if (properties != null && properties.getProperty("incremental", "no").equals("yes")) {
                String previous = properties.getProperty("previous", "");
                if (previous.length() > 0) {
                    contents = new File(bd.getParentDir(), previous);
                    if (!contents.canRead())
                        throw new SAXException("Required part of incremental backup not found: " + contents.getAbsolutePath());
                }
            }
        } while (contents != null);
    }

    /**
     * Replaces the default listener receiving progress/warning callbacks.
     *
     * @param listener the listener to notify during restore
     */
    public void setListener(RestoreListener listener) {
        this.listener = listener;
    }

    /**
     * Processes every backup descriptor pushed by the constructor, parsing each
     * one with the SAX reader (which drives startElement below).
     *
     * @param showGUI if true, run the parse on a background thread and show a progress dialog
     * @param parent parent frame for the dialog; may be null
     */
    public void restore(boolean showGUI, JFrame parent)
            throws XMLDBException, FileNotFoundException, IOException, SAXException {
        if (showGUI) {
            dialog = new RestoreDialog(parent, "Restoring data ...", false);
            dialog.setVisible(true);
            // Parse on a worker thread so the dialog stays responsive; parse
            // errors are reported to the dialog instead of being thrown.
            Thread restoreThread = new Thread() {
                public void run() {
                    while (!stack.isEmpty()) {
                        try {
                            contents = (BackupDescriptor) stack.pop();
                            dialog.setBackup(contents.getSymbolicPath());
                            reader.parse(contents.getInputSource());
                        } catch (FileNotFoundException e) {
                            dialog.displayMessage(e.getMessage());
                        } catch (IOException e) {
                            dialog.displayMessage(e.getMessage());
                        } catch (SAXException e) {
                            dialog.displayMessage(e.getMessage());
                        }
                    }
                    dialog.setVisible(false);
                }
            };
            restoreThread.start();
            if(parent == null) {
                // No owning frame: block the caller by polling until the worker
                // finishes. NOTE(review): this is a 20ms wait/poll loop rather
                // than restoreThread.join(); InterruptedException is swallowed.
                while (restoreThread.isAlive()) {
                    synchronized (this) {
                        try {
                            wait(20);
                        } catch (InterruptedException e) {
                        }
                    }
                }
            }
        } else {
            // Headless mode: parse synchronously, propagating any exception.
            while(!stack.isEmpty()) {
                contents = (BackupDescriptor) stack.pop();
                EXistInputSource is = contents.getInputSource();
                is.setEncoding("UTF-8"); //restoring sysId
                reader.parse(is);
            }
        }
    }

    /**
     * Dispatches on the eXist-namespace backup elements: creates collections,
     * stores resources, descends into sub-collections, and replays deletions
     * recorded by incremental backups.
     *
     * @see org.xml.sax.ContentHandler#startElement(java.lang.String, java.lang.String, java.lang.String, org.xml.sax.Attributes)
     */
    public void startElement(String namespaceURI, String localName, String qName, Attributes atts)
            throws SAXException {
        if (namespaceURI.equals(Namespaces.EXIST_NS)) {
            if (localName.equals("collection")) {
                final String name = atts.getValue("name");
                final String owner = atts.getValue("owner");
                final String group = atts.getValue("group");
                final String mode = atts.getValue("mode");
                final String created = atts.getValue("created");
                // The root collection element carries the backup format version.
                String strVersion = atts.getValue("version");
                if(strVersion!=null) {
                    try {
                        this.version = Integer.parseInt(strVersion);
                    } catch (NumberFormatException e) {
                        // Unparsable version: fall back to the legacy (pre-URI) format.
                        this.version=0;
                    }
                }
                if (name == null)
                    throw new SAXException("collection requires a name " + "attribute");
                try {
                    listener.createCollection(name);
                    XmldbURI collUri;
                    if(version >= strictUriVersion) {
                        // Modern backups store names as ready-to-use URIs.
                        collUri = XmldbURI.create(name);
                    } else {
                        // Legacy backups need the name percent-encoded first.
                        try {
                            collUri = URIUtils.encodeXmldbUriFor(name);
                        } catch (URISyntaxException e) {
                            listener.warn("Could not parse document name into a URI: "+e.getMessage());
                            return;
                        }
                    }
                    Date date_created = null;
                    if (created != null)
                        try {
                            date_created = new DateTimeValue(created).getDate();
                        } catch (XPathException e2) {
                            // Unparsable creation date: restore without it.
                        }
                    current = mkcol(collUri, date_created);
                    if (current == null)
                        throw new SAXException("Collection not found: " + collUri);
                    // Restore ownership and permissions (mode is octal).
                    UserManagementService service =
                            (UserManagementService) current.getService("UserManagementService", "1.0");
                    User u = new User(owner, null, group);
                    service.chown(u, group);
                    service.chmod(Integer.parseInt(mode, 8));
                } catch (Exception e) {
                    listener.warn("An unrecoverable error occurred while restoring\ncollection '"
                            + name + "'. " + "Aborting restore!");
                    e.printStackTrace();
                    throw new SAXException(e.getMessage(), e);
                }
                if(dialog != null)
                    dialog.setCollection(name);
            } else if (localName.equals("subcollection")) {
                // Prefer the on-disk filename; fall back to the logical name.
                String name = atts.getValue("filename");
                if (name == null) {
                    name = atts.getValue("name");
                }
                BackupDescriptor subbd = contents.getChildBackupDescriptor(name);
                if (subbd!=null)
                    stack.push(subbd);
                else
                    listener.warn(name + " does not exist or is not readable.");
            } else if (localName.equals("resource")) {
                // Resources flagged skip="yes" (anything but "no") are ignored.
                String skip = atts.getValue("skip");
                if (skip == null || skip.equals("no")) {
                    String type = atts.getValue("type");
                    if(type == null)
                        type ="XMLResource";
                    final String name = atts.getValue("name");
                    final String owner = atts.getValue("owner");
                    final String group = atts.getValue("group");
                    final String perms = atts.getValue("mode");
                    String filename = atts.getValue("filename");
                    final String mimetype = atts.getValue("mimetype");
                    final String created = atts.getValue("created");
                    final String modified = atts.getValue("modified");
                    final String publicid = atts.getValue("publicid");
                    final String systemid = atts.getValue("systemid");
                    final String namedoctype = atts.getValue("namedoctype");
                    if (filename == null)
                        filename = name;
                    if (name == null) {
                        // NOTE(review): only warns but does not return, so a null
                        // name still reaches XmldbURI.create(name) below - confirm
                        // whether this branch should abort the element instead.
                        listener.warn("Wrong entry in backup descriptor: resource requires a name attribute.");
                    }
                    XmldbURI docUri;
                    if(version >= strictUriVersion) {
                        docUri = XmldbURI.create(name);
                    } else {
                        try {
                            docUri = URIUtils.encodeXmldbUriFor(name);
                        } catch (URISyntaxException e) {
                            listener.warn("Could not parse document name into a URI: "+e.getMessage());
                            return;
                        }
                    }
                    try {
                        // Let the dialog observe storage progress if supported.
                        if (dialog != null && current instanceof Observable) {
                            ((Observable) current).addObserver(dialog.getObserver());
                        }
                        if(dialog != null)
                            dialog.setResource(name);
                        final Resource res = current.createResource(docUri.toString(), type);
                        if (mimetype != null)
                            ((EXistResource)res).setMimeType(mimetype);
                        res.setContent(contents.getContent(filename));
                        // Restoring name
                        Date date_created = null;
                        Date date_modified = null;
                        if (created != null)
                            try {
                                date_created = (new DateTimeValue(created)).getDate();
                            } catch (XPathException e2) {
                                listener.warn("Illegal creation date. Skipping ...");
                            }
                        if (modified != null)
                            try {
                                date_modified = (new DateTimeValue(modified)).getDate();
                            } catch (XPathException e2) {
                                listener.warn("Illegal modification date. Skipping ...");
                            }
                        current.storeResource(res, date_created, date_modified);
                        // Re-attach the DOCTYPE declaration, if any was backed up.
                        if (publicid != null || systemid != null ) {
                            DocumentType doctype = new DocumentTypeImpl(namedoctype,publicid,systemid );
                            try {
                                ((EXistResource)res).setDocType(doctype);
                            } catch (XMLDBException e1) {
                                e1.printStackTrace();
                            }
                        }
                        // Restore ownership and permissions (perms is octal).
                        UserManagementService service =
                                (UserManagementService) current.getService("UserManagementService", "1.0");
                        User u = new User(owner, null, group);
                        try {
                            service.chown(res, u, group);
                        } catch (XMLDBException e1) {
                            listener.warn("Failed to change owner on document '" + name + "'; skipping ...");
                        }
                        service.chmod(res, Integer.parseInt(perms, 8));
                        listener.restored(name);
                    } catch (Exception e) {
                        // Best effort: report and continue with the next resource.
                        listener.warn("Failed to restore resource '" + name + "'\nfrom file '"
                                + contents.getSymbolicPath(name,false) + "'.\nReason: " + e.getMessage());
                        e.printStackTrace();
                        // throw new RuntimeException(e);
                    }
                }
            } else if (localName.equals("deleted")) {
                // Replay a deletion recorded by an incremental backup.
                final String name = atts.getValue("name");
                final String type = atts.getValue("type");
                if (type.equals("collection")) {
                    try {
                        Collection child = current.getChildCollection(name);
                        if (child != null) {
                            CollectionManagementService cmgt =
                                    (CollectionManagementService) current.getService("CollectionManagementService", "1.0");
                            cmgt.removeCollection(name);
                        }
                    } catch (XMLDBException e) {
                        listener.warn("Failed to remove deleted collection: " + name + ": " + e.getMessage());
                    }
                } else if (type.equals("resource")) {
                    try {
                        Resource resource = current.getResource(name);
                        if (resource != null)
                            current.removeResource(resource);
                    } catch (XMLDBException e) {
                        listener.warn("Failed to remove deleted resource: " + name + ": " + e.getMessage());
                    }
                }
            }
        }
    }

    /**
     * Creates the collection denoted by collPath (and any missing intermediate
     * collections), mkdir -p style, and returns it.
     *
     * @param collPath absolute collection path within the database
     * @param created creation date to stamp on newly created collections; may be null
     * @return the existing or newly created collection
     */
    private final CollectionImpl mkcol(XmldbURI collPath, Date created)
            throws XMLDBException, URISyntaxException {
        XmldbURI[] segments = collPath.getPathSegments();
        CollectionManagementServiceImpl mgtService;
        Collection c;
        XmldbURI dbUri;
        // Ensure the base URI addresses the root collection (/db).
        if (!uri.endsWith(DBBroker.ROOT_COLLECTION))
            dbUri = XmldbURI.xmldbUriFor(uri + DBBroker.ROOT_COLLECTION);
        else
            dbUri = XmldbURI.xmldbUriFor(uri);
        Collection current = DatabaseManager.getCollection(dbUri.toString(), username, pass);
        XmldbURI p = XmldbURI.ROOT_COLLECTION_URI;
        // Segment 0 is the root itself; walk the remaining segments, creating
        // each missing child along the way.
        for(int i=1;i<segments.length;i++) {
            p = p.append(segments[i]);
            XmldbURI xmldbURI = dbUri.resolveCollectionPath(p);
            c = DatabaseManager.getCollection(xmldbURI.toString(), username, pass);
            if (c == null) {
                mgtService = (CollectionManagementServiceImpl) current.getService(
                        "CollectionManagementService", "1.0");
                //current = mgtService.createCollection(token);
                current = mgtService.createCollection(segments[i], created);
            } else
                current = c;
        }
        return (CollectionImpl)current;
    }

    /**
     * Changes the admin (DBA) password before the restore runs and remembers it
     * as the password for all subsequent XML:DB calls.
     *
     * @param adminPassword the new admin password
     */
    private void setAdminCredentials(String adminPassword)
            throws XMLDBException, URISyntaxException {
        XmldbURI dbUri;
        if (!uri.endsWith(DBBroker.ROOT_COLLECTION))
            dbUri = XmldbURI.xmldbUriFor(uri + DBBroker.ROOT_COLLECTION);
        else
            dbUri = XmldbURI.xmldbUriFor(uri);
        Collection root = DatabaseManager.getCollection(dbUri.toString(), username, pass);
        UserManagementService mgmt =
                (UserManagementService) root.getService("UserManagementService", "1.0");
        User dba = mgmt.getUser(SecurityManager.DBA_USER);
        dba.setPassword(adminPassword);
        mgmt.updateUser(dba);
        // Use the new password from now on.
        pass = adminPassword;
    }

    /**
     * Shows a modal error dialog with the given (possibly multi-line) message.
     *
     * @param message text to display
     */
    public static void showErrorMessage(String message) {
        JTextArea msgArea = new JTextArea(message);
        msgArea.setEditable(false);
        msgArea.setBackground(null);
        JScrollPane scroll = new JScrollPane(msgArea);
        JOptionPane optionPane = new JOptionPane();
        optionPane.setMessage(new Object[]{scroll});
        optionPane.setMessageType(JOptionPane.ERROR_MESSAGE);
        JDialog dialog = optionPane.createDialog(null, "Error");
        dialog.setResizable(true);
        dialog.pack();
        dialog.setVisible(true);
        return;
    }

    /** Callback interface for restore progress and diagnostics. */
    public interface RestoreListener {
        /** Called before a collection is (re)created. */
        void createCollection(String collection);
        /** Called after a resource has been stored successfully. */
        void restored(String resource);
        /** Informational progress message. */
        void info(String message);
        /** Non-fatal problem; the restore continues. */
        void warn(String message);
    }

    /**
     * Default listener: routes messages to the progress dialog when one is
     * shown, otherwise to stderr.
     */
    private class DefaultListener implements RestoreListener {

        public void createCollection(String collection) {
            info("creating collection " + collection);
        }

        public void restored(String resource) {
            info("restored " + resource);
        }

        public void info(String message) {
            if (dialog != null)
                dialog.displayMessage(message);
            else
                System.err.println(message);
        }

        public void warn(String message) {
            if (dialog != null)
                dialog.displayMessage(message);
            else
                System.err.println(message);
        }
    }
}
package com.github.sbugat.nqueens.solvers.bruteforce.instrumentations;

import java.util.ArrayList;
import java.util.List;

import com.github.sbugat.nqueens.GenericInstrumentedNQueensSolver;

/**
 * Very slow brute-force algorithm for the N queens puzzle solver.
 *
 * This algorithm is the slowest brute-force algorithm possible and is a floor value for optimisations tests.
 *
 * It enumerates every subset of squares (each square either holds a queen or not, with no
 * limit on the number of queens placed) and validates complete boards only. The instrumentation
 * counters it increments (methodCallsCount, explicitTestsCount, implicitTestsCount,
 * queenPlacementsCount, squareReadsCount, squareWritesCount, solutionCount) are presumably
 * declared in GenericInstrumentedNQueensSolver - they are not declared in this file.
 *
 * @author Sylvain Bugat
 */
public final class SlowBruteForceNQueensSolverWithListsNoQueensLimit extends GenericInstrumentedNQueensSolver {

	/** Chessboard represented by a list of lists; Boolean.TRUE marks a queen. */
	private final List<List<Boolean>> chessboard;

	/**
	 * Builds an empty chessboardSizeArg x chessboardSizeArg board (all squares FALSE).
	 *
	 * @param chessboardSizeArg size N of the board
	 * @param printSolutionArg whether found solutions are printed (handled by the superclass)
	 */
	public SlowBruteForceNQueensSolverWithListsNoQueensLimit(final int chessboardSizeArg, final boolean printSolutionArg) {

		super(chessboardSizeArg, printSolutionArg);

		chessboard = new ArrayList<>();
		for (int x = 0; x < chessboardSizeArg; x++) {
			final List<Boolean> lineList = new ArrayList<>();
			for (int y = 0; y < chessboardSizeArg; y++) {
				lineList.add(Boolean.FALSE);
			}
			chessboard.add(lineList);
		}
	}

	/**
	 * Runs the solver over the whole board.
	 *
	 * @return the number of solutions found
	 */
	@Override
	public long solve() {

		// Start the algorithm at the first position
		methodCallsCount++;
		solve(0, 0);

		// Return the number of solutions found
		return solutionCount;
	}

	/**
	 * Solving recursive method, do a greedy algorithm by testing all combinations.
	 *
	 * For each square the recursion explores both states: queen placed (first half of the
	 * method) and queen absent (second half, after the queen is removed again). Squares are
	 * visited column-first: x advances, and y advances whenever x wraps to 0.
	 *
	 * @param x X position on the chessboard
	 * @param y Y position on the chessboard
	 */
	private void solve(final int x, final int y) {

		// Put a queen on the current position
		queenPlacementsCount++;
		methodCallsCount += 2;
		squareWritesCount++;
		chessboard.get(x).set(y, Boolean.TRUE);

		// Test if the chessboard is a solution with exactly N queens
		explicitTestsCount++;
		methodCallsCount++;
		if (checkSolutionChessboard()) {
			solutionCount++;
			methodCallsCount++;
			print();
		}
		else {
			// Recursive call to the next position
			final int nextX = (x + 1) % chessboardSize;
			// Switch to the next line
			explicitTestsCount++;
			if (0 == nextX) {
				// End of the chessboard check
				explicitTestsCount++;
				if (y + 1 < chessboardSize) {
					methodCallsCount++;
					solve(nextX, y + 1);
				}
			}
			else {
				methodCallsCount++;
				solve(nextX, y);
			}
		}

		// Remove the queen on the current position
		methodCallsCount += 2;
		squareWritesCount++;
		chessboard.get(x).set(y, Boolean.FALSE);

		// Recursive call to the next position (exploring the "no queen here" branch)
		final int nextX = (x + 1) % chessboardSize;
		// Switch to the next line
		explicitTestsCount++;
		if (0 == nextX) {
			// End of the chessboard check
			explicitTestsCount++;
			if (y + 1 < chessboardSize) {
				methodCallsCount++;
				solve(nextX, y + 1);
			}
		}
		else {
			methodCallsCount++;
			solve(nextX, y);
		}
	}

	/**
	 * Check if the chessboard has exactly N queens and is a solution (only one queens per lines, columns and diagnonals).
	 *
	 * NOTE(review): the per-line check below does NOT increment explicitTestsCount before
	 * "if (usedLine)", whereas the column and both diagonal checks DO increment it before
	 * their equivalent "if (used...)" tests - confirm which counting convention is intended.
	 *
	 * @return true if the chessboard contain a solution, false otherwise
	 */
	private boolean checkSolutionChessboard() {

		int placedQueens = 0;

		// Count all queens on the chessboard
		implicitTestsCount++;
		for (int x = 0; x < chessboardSize; x++) {
			implicitTestsCount++;
			for (int y = 0; y < chessboardSize; y++) {
				explicitTestsCount++;
				methodCallsCount += 3;
				squareReadsCount++;
				if (chessboard.get(x).get(y).booleanValue()) {
					placedQueens++;
				}
				implicitTestsCount++;
			}
			implicitTestsCount++;
		}

		// A solution must use exactly N queens
		explicitTestsCount++;
		if (placedQueens != chessboardSize) {
			return false;
		}

		// Check if 2 queens are on the same line
		implicitTestsCount++;
		for (int y = 0; y < chessboardSize; y++) {

			boolean usedLine = false;
			implicitTestsCount++;
			for (int x = 0; x < chessboardSize; x++) {
				explicitTestsCount++;
				methodCallsCount += 3;
				squareReadsCount++;
				if (chessboard.get(x).get(y).booleanValue()) {
					if (usedLine) {
						return false;
					}
					usedLine = true;
				}
				implicitTestsCount++;
			}
			implicitTestsCount++;
		}

		// Check if 2 queens are on the same column
		implicitTestsCount++;
		for (int x = 0; x < chessboardSize; x++) {

			boolean usedColumn = false;
			implicitTestsCount++;
			for (int y = 0; y < chessboardSize; y++) {
				explicitTestsCount++;
				methodCallsCount += 3;
				squareReadsCount++;
				if (chessboard.get(x).get(y).booleanValue()) {
					explicitTestsCount++;
					if (usedColumn) {
						return false;
					}
					usedColumn = true;
				}
				implicitTestsCount++;
			}
			implicitTestsCount++;
		}

		// Check if 2 queens are on the same descending diagonal
		// (squares with x + y == diagonal constant; here x = diagonal - y)
		implicitTestsCount++;
		for (int diagonal = 0; diagonal < chessboardSize * 2 - 1; diagonal++) {

			boolean usedDiagonal = false;
			implicitTestsCount++;
			for (int y = 0; y < chessboardSize; y++) {

				final int x = diagonal - y;
				explicitTestsCount++;
				if (x >= 0) {
					explicitTestsCount++;
					if (x < chessboardSize) {
						explicitTestsCount++;
						methodCallsCount += 3;
						squareReadsCount++;
						if (chessboard.get(x).get(y).booleanValue()) {
							explicitTestsCount++;
							if (usedDiagonal) {
								return false;
							}
							usedDiagonal = true;
						}
					}
				}
				implicitTestsCount++;
			}
			implicitTestsCount++;
		}

		// Check if 2 queens are on the same ascending diagonal
		// (squares with x - y == constant; here x = diagonal - chessboardSize + 1 + y)
		implicitTestsCount++;
		for (int diagonal = 0; diagonal < chessboardSize * 2 - 1; diagonal++) {

			boolean usedDiagonal = false;
			implicitTestsCount++;
			for (int y = 0; y < chessboardSize; y++) {

				final int x = diagonal - chessboardSize + 1 + y;
				explicitTestsCount++;
				if (x >= 0) {
					explicitTestsCount++;
					if (x < chessboardSize) {
						explicitTestsCount++;
						methodCallsCount += 3;
						squareReadsCount++;
						if (chessboard.get(x).get(y).booleanValue()) {
							explicitTestsCount++;
							if (usedDiagonal) {
								return false;
							}
							usedDiagonal = true;
						}
					}
				}
				implicitTestsCount++;
			}
			implicitTestsCount++;
		}

		return true;
	}

	/**
	 * Returns whether a queen occupies the given square.
	 *
	 * @param x X position on the chessboard
	 * @param y Y position on the chessboard
	 * @return true if a queen is placed at (x, y)
	 */
	@Override
	public boolean getChessboardPosition(final int x, final int y) {
		return chessboard.get(x).get(y).booleanValue();
	}
}
package org.shaman.terrain; import com.jme3.collision.MotionAllowedListener; import com.jme3.input.*; import com.jme3.input.controls.*; import com.jme3.math.FastMath; import com.jme3.math.Matrix3f; import com.jme3.math.Quaternion; import com.jme3.math.Vector3f; import com.jme3.renderer.Camera; /** * A first person view camera controller. * After creation, you must register the camera controller with the * dispatcher using #registerWithDispatcher(). * * Controls: * - Move the mouse to rotate the camera * - Mouse wheel for zooming in or out * - WASD keys for moving forward/backward and strafing * - QZ keys raise or lower the camera */ public class CustomFlyByCamera implements AnalogListener, ActionListener { private static String[] mappings = new String[]{ CameraInput.FLYCAM_LEFT, CameraInput.FLYCAM_RIGHT, CameraInput.FLYCAM_UP, CameraInput.FLYCAM_DOWN, CameraInput.FLYCAM_STRAFELEFT, CameraInput.FLYCAM_STRAFERIGHT, CameraInput.FLYCAM_FORWARD, CameraInput.FLYCAM_BACKWARD, CameraInput.FLYCAM_ZOOMIN, CameraInput.FLYCAM_ZOOMOUT, CameraInput.FLYCAM_ROTATEDRAG, CameraInput.FLYCAM_RISE, CameraInput.FLYCAM_LOWER, CameraInput.FLYCAM_INVERTY }; protected Camera cam; protected Vector3f initialUpVec; protected float rotationSpeed = 1f; protected float moveSpeed = 3f; protected float zoomSpeed = 1f; protected MotionAllowedListener motionAllowed = null; protected boolean enabled = true; protected boolean dragToRotate = false; protected boolean canRotate = false; protected boolean invertY = false; protected InputManager inputManager; protected boolean useJoystick = true; protected float joystickMoveThreshold = 0.08f; protected float joystickMoveFactor = 0.01f; protected float joystickRotateThreshold = 0.05f; protected float joystickRotateFactor = 0.005f; protected boolean canRotateSideways = false; protected String joystickName = "SpaceNavigator for Notebooks"; /** * Creates a new FlyByCamera to control the given Camera object. 
* @param cam */ public CustomFlyByCamera(Camera cam){ this.cam = cam; initialUpVec = cam.getUp().clone(); } /** * Sets the up vector that should be used for the camera. * @param upVec */ public void setUpVector(Vector3f upVec) { initialUpVec.set(upVec); } public void setMotionAllowedListener(MotionAllowedListener listener){ this.motionAllowed = listener; } /** * Sets the move speed. The speed is given in world units per second. * @param moveSpeed */ public void setMoveSpeed(float moveSpeed){ this.moveSpeed = moveSpeed; } /** * Gets the move speed. The speed is given in world units per second. * @return moveSpeed */ public float getMoveSpeed(){ return moveSpeed; } /** * Sets the rotation speed. * @param rotationSpeed */ public void setRotationSpeed(float rotationSpeed){ this.rotationSpeed = rotationSpeed; } /** * Gets the move speed. The speed is given in world units per second. * @return rotationSpeed */ public float getRotationSpeed(){ return rotationSpeed; } /** * Sets the zoom speed. * @param zoomSpeed */ public void setZoomSpeed(float zoomSpeed) { this.zoomSpeed = zoomSpeed; } /** * Gets the zoom speed. The speed is a multiplier to increase/decrease * the zoom rate. * @return zoomSpeed */ public float getZoomSpeed() { return zoomSpeed; } /** * @param enable If false, the camera will ignore input. */ public void setEnabled(boolean enable){ if (enabled && !enable){ if (inputManager!= null && (!dragToRotate || (dragToRotate && canRotate))){ inputManager.setCursorVisible(true); } } enabled = enable; } /** * @return If enabled * @see FlyByCamera#setEnabled(boolean) */ public boolean isEnabled(){ return enabled; } /** * @return If drag to rotate feature is enabled. * * @see FlyByCamera#setDragToRotate(boolean) */ public boolean isDragToRotate() { return dragToRotate; } /** * Set if drag to rotate mode is enabled. * * When true, the user must hold the mouse button * and drag over the screen to rotate the camera, and the cursor is * visible until dragged. 
Otherwise, the cursor is invisible at all times * and holding the mouse button is not needed to rotate the camera. * This feature is disabled by default. * * @param dragToRotate True if drag to rotate mode is enabled. */ public void setDragToRotate(boolean dragToRotate) { this.dragToRotate = dragToRotate; if (inputManager != null) { inputManager.setCursorVisible(dragToRotate); } } /** * Registers the FlyByCamera to receive input events from the provided * Dispatcher. * @param inputManager */ public void registerWithInput(InputManager inputManager){ this.inputManager = inputManager; // both mouse and button - rotation of cam inputManager.addMapping(CameraInput.FLYCAM_LEFT, new MouseAxisTrigger(MouseInput.AXIS_X, true)); inputManager.addMapping(CameraInput.FLYCAM_RIGHT, new MouseAxisTrigger(MouseInput.AXIS_X, false)); inputManager.addMapping(CameraInput.FLYCAM_UP, new MouseAxisTrigger(MouseInput.AXIS_Y, false)); inputManager.addMapping(CameraInput.FLYCAM_DOWN, new MouseAxisTrigger(MouseInput.AXIS_Y, true)); // mouse only - zoom in/out with wheel, and rotate drag //inputManager.addMapping(CameraInput.FLYCAM_ZOOMIN, new MouseAxisTrigger(MouseInput.AXIS_WHEEL, false)); //inputManager.addMapping(CameraInput.FLYCAM_ZOOMOUT, new MouseAxisTrigger(MouseInput.AXIS_WHEEL, true)); inputManager.addMapping(CameraInput.FLYCAM_ROTATEDRAG, new MouseButtonTrigger(MouseInput.BUTTON_RIGHT/*BUTTON_LEFT*/)); // keyboard only WASD for movement and WZ for rise/lower height inputManager.addMapping(CameraInput.FLYCAM_STRAFELEFT, new KeyTrigger(KeyInput.KEY_A)); inputManager.addMapping(CameraInput.FLYCAM_STRAFERIGHT, new KeyTrigger(KeyInput.KEY_D)); inputManager.addMapping(CameraInput.FLYCAM_FORWARD, new KeyTrigger(KeyInput.KEY_W)); inputManager.addMapping(CameraInput.FLYCAM_BACKWARD, new KeyTrigger(KeyInput.KEY_S)); inputManager.addMapping(CameraInput.FLYCAM_RISE, new KeyTrigger(KeyInput.KEY_Q)); inputManager.addMapping(CameraInput.FLYCAM_LOWER, new KeyTrigger(KeyInput.KEY_Z)); 
inputManager.addListener(this, mappings); inputManager.setCursorVisible(dragToRotate || !isEnabled()); Joystick[] joysticks = inputManager.getJoysticks(); if (joysticks != null && joysticks.length > 0){ for (Joystick j : joysticks) { mapJoystick(j); } } } protected void mapJoystick( Joystick joystick ) { System.out.println("Joystick: "+joystick.getName()); System.out.println("Axes: "+joystick.getAxes()); if (joystickName.equals(joystick.getName())) { //map SpaceNavigator JoystickAxis x = joystick.getAxis("x"); inputManager.addMapping("JoyX-", new JoyAxisTrigger(joystick.getJoyId(), x.getAxisId(), false)); inputManager.addMapping("JoyX+", new JoyAxisTrigger(joystick.getJoyId(), x.getAxisId(), true)); JoystickAxis y = joystick.getAxis("y"); inputManager.addMapping("JoyY-", new JoyAxisTrigger(joystick.getJoyId(), y.getAxisId(), false)); inputManager.addMapping("JoyY+", new JoyAxisTrigger(joystick.getJoyId(), y.getAxisId(), true)); JoystickAxis z = joystick.getAxis("z"); inputManager.addMapping("JoyZ-", new JoyAxisTrigger(joystick.getJoyId(), z.getAxisId(), false)); inputManager.addMapping("JoyZ+", new JoyAxisTrigger(joystick.getJoyId(), z.getAxisId(), true)); JoystickAxis rx = joystick.getAxis("rx"); inputManager.addMapping("JoyRX-", new JoyAxisTrigger(joystick.getJoyId(), rx.getAxisId(), false)); inputManager.addMapping("JoyRX+", new JoyAxisTrigger(joystick.getJoyId(), rx.getAxisId(), true)); JoystickAxis ry = joystick.getAxis("ry"); inputManager.addMapping("JoyRY-", new JoyAxisTrigger(joystick.getJoyId(), ry.getAxisId(), false)); inputManager.addMapping("JoyRY+", new JoyAxisTrigger(joystick.getJoyId(), ry.getAxisId(), true)); JoystickAxis rz = joystick.getAxis("rz"); inputManager.addMapping("JoyRZ-", new JoyAxisTrigger(joystick.getJoyId(), rz.getAxisId(), false)); inputManager.addMapping("JoyRZ+", new JoyAxisTrigger(joystick.getJoyId(), rz.getAxisId(), true)); inputManager.addListener(this, "JoyX-", "JoyX+", "JoyY-", "JoyY+", "JoyZ-", "JoyZ+", "JoyRX-", "JoyRX+", 
"JoyRY-", "JoyRY+", "JoyRZ-", "JoyRZ+"); System.out.println("joystick added"); } // // Map it differently if there are Z axis // if( joystick.getAxis( JoystickAxis.Z_ROTATION ) != null && joystick.getAxis( JoystickAxis.Z_AXIS ) != null ) { // // Make the left stick move // joystick.getXAxis().assignAxis( CameraInput.FLYCAM_STRAFERIGHT, CameraInput.FLYCAM_STRAFELEFT ); // joystick.getYAxis().assignAxis( CameraInput.FLYCAM_BACKWARD, CameraInput.FLYCAM_FORWARD ); // // And the right stick control the camera // joystick.getAxis( JoystickAxis.Z_ROTATION ).assignAxis( CameraInput.FLYCAM_DOWN, CameraInput.FLYCAM_UP ); // joystick.getAxis( JoystickAxis.Z_AXIS ).assignAxis( CameraInput.FLYCAM_RIGHT, CameraInput.FLYCAM_LEFT ); // // And let the dpad be up and down // joystick.getPovYAxis().assignAxis(CameraInput.FLYCAM_RISE, CameraInput.FLYCAM_LOWER); // if( joystick.getButton( "Button 8" ) != null ) { // // Let the stanard select button be the y invert toggle // joystick.getButton( "Button 8" ).assignButton( CameraInput.FLYCAM_INVERTY ); // } else { // joystick.getPovXAxis().assignAxis(CameraInput.FLYCAM_STRAFERIGHT, CameraInput.FLYCAM_STRAFELEFT); // joystick.getPovYAxis().assignAxis(CameraInput.FLYCAM_FORWARD, CameraInput.FLYCAM_BACKWARD); // joystick.getXAxis().assignAxis(CameraInput.FLYCAM_RIGHT, CameraInput.FLYCAM_LEFT); // joystick.getYAxis().assignAxis(CameraInput.FLYCAM_DOWN, CameraInput.FLYCAM_UP); } /** * Registers the FlyByCamera to receive input events from the provided * Dispatcher. 
*/ public void unregisterInput(){ if (inputManager == null) { return; } for (String s : mappings) { if (inputManager.hasMapping(s)) { inputManager.deleteMapping( s ); } } inputManager.removeListener(this); inputManager.setCursorVisible(!dragToRotate); Joystick[] joysticks = inputManager.getJoysticks(); if (joysticks != null && joysticks.length > 0){ Joystick joystick = joysticks[0]; // No way to unassing axis } } protected void rotateCamera(float value, Vector3f axis, boolean ignoreDragging){ if (dragToRotate && !ignoreDragging){ if (canRotate){ // value = -value; }else{ return; } } Matrix3f mat = new Matrix3f(); mat.fromAngleNormalAxis(rotationSpeed * value, axis); Vector3f up = cam.getUp(); Vector3f left = cam.getLeft(); Vector3f dir = cam.getDirection(); mat.mult(up, up); mat.mult(left, left); mat.mult(dir, dir); Quaternion q = new Quaternion(); q.fromAxes(left, up, dir); q.normalizeLocal(); cam.setAxes(q); } protected void zoomCamera(float value){ // derive fovY value float h = cam.getFrustumTop(); float w = cam.getFrustumRight(); float aspect = w / h; float near = cam.getFrustumNear(); float fovY = FastMath.atan(h / near) / (FastMath.DEG_TO_RAD * .5f); float newFovY = fovY + value * 0.1f * zoomSpeed; if (newFovY > 0f) { // Don't let the FOV go zero or negative. 
fovY = newFovY; } h = FastMath.tan( fovY * FastMath.DEG_TO_RAD * .5f) * near; w = h * aspect; cam.setFrustumTop(h); cam.setFrustumBottom(-h); cam.setFrustumLeft(-w); cam.setFrustumRight(w); } protected void riseCamera(float value){ Vector3f vel = new Vector3f(0, value * moveSpeed, 0); Vector3f pos = cam.getLocation().clone(); if (motionAllowed != null) motionAllowed.checkMotionAllowed(pos, vel); else pos.addLocal(vel); cam.setLocation(pos); } protected void moveCamera(float value, boolean sideways){ Vector3f vel = new Vector3f(); Vector3f pos = cam.getLocation().clone(); if (sideways){ cam.getLeft(vel); }else{ cam.getDirection(vel); } vel.multLocal(value * moveSpeed); if (motionAllowed != null) motionAllowed.checkMotionAllowed(pos, vel); else pos.addLocal(vel); cam.setLocation(pos); } public void onAnalog(String name, float value, float tpf) { if (!enabled) return; System.out.println(name+": "+value); if (name.equals(CameraInput.FLYCAM_LEFT)){ rotateCamera(value, initialUpVec, false); }else if (name.equals(CameraInput.FLYCAM_RIGHT)){ rotateCamera(-value, initialUpVec, false); }else if (name.equals(CameraInput.FLYCAM_UP)){ rotateCamera(-value * (invertY ? -1 : 1), cam.getLeft(), false); }else if (name.equals(CameraInput.FLYCAM_DOWN)){ rotateCamera(value * (invertY ? 
-1 : 1), cam.getLeft(), false); }else if (name.equals(CameraInput.FLYCAM_FORWARD)){ moveCamera(value, false); }else if (name.equals(CameraInput.FLYCAM_BACKWARD)){ moveCamera(-value, false); }else if (name.equals(CameraInput.FLYCAM_STRAFELEFT)){ moveCamera(value, true); }else if (name.equals(CameraInput.FLYCAM_STRAFERIGHT)){ moveCamera(-value, true); }else if (name.equals(CameraInput.FLYCAM_RISE)){ riseCamera(value); }else if (name.equals(CameraInput.FLYCAM_LOWER)){ riseCamera(-value); }else if (name.equals(CameraInput.FLYCAM_ZOOMIN)){ zoomCamera(value); }else if (name.equals(CameraInput.FLYCAM_ZOOMOUT)){ zoomCamera(-value); } else if (name.equals("JoyX-") && useJoystick) { float v = Math.max(0, value-joystickMoveThreshold) * joystickMoveFactor; moveCamera(-v, true); } else if (name.equals("JoyX+") && useJoystick) { float v = Math.max(0, value-joystickMoveThreshold) * joystickMoveFactor; moveCamera(v, true); } else if (name.equals("JoyY-") && useJoystick) { float v = Math.max(0, value-joystickMoveThreshold) * joystickMoveFactor; moveCamera(-v, false); } else if (name.equals("JoyY+") && useJoystick) { float v = Math.max(0, value-joystickMoveThreshold) * joystickMoveFactor; moveCamera(v, false); } else if (name.equals("JoyZ-") && useJoystick) { float v = Math.max(0, value-joystickMoveThreshold) * joystickMoveFactor; riseCamera(-v); } else if (name.equals("JoyZ+") && useJoystick) { float v = Math.max(0, value-joystickMoveThreshold) * joystickMoveFactor; riseCamera(v); } else if (name.equals("JoyRX-") && useJoystick) { float v = Math.max(0, value-joystickRotateThreshold) * joystickRotateFactor; rotateCamera(-v, cam.getLeft(), true); } else if (name.equals("JoyRX+") && useJoystick) { float v = Math.max(0, value-joystickRotateThreshold) * joystickRotateFactor; rotateCamera(v, cam.getLeft(), true); } else if (name.equals("JoyRY-") && useJoystick && canRotateSideways) { float v = Math.max(0, value-joystickRotateThreshold) * joystickRotateFactor; rotateCamera(-v, 
cam.getDirection(), true); } else if (name.equals("JoyRY+") && useJoystick && canRotateSideways) { float v = Math.max(0, value-joystickRotateThreshold) * joystickRotateFactor; rotateCamera(v, cam.getDirection(), true); } else if (name.equals("JoyRZ-") && useJoystick) { float v = Math.max(0, value-joystickRotateThreshold) * joystickRotateFactor; rotateCamera(-v, initialUpVec, true); } else if (name.equals("JoyRZ+") && useJoystick) { float v = Math.max(0, value-joystickRotateThreshold) * joystickRotateFactor; rotateCamera(v, initialUpVec, true); } } public void onAction(String name, boolean value, float tpf) { if (!enabled) return; if (name.equals(CameraInput.FLYCAM_ROTATEDRAG) && dragToRotate){ canRotate = value; inputManager.setCursorVisible(!value); } else if (name.equals(CameraInput.FLYCAM_INVERTY)) { // Toggle on the up. if( !value ) { invertY = !invertY; } } } }
package org.joni; import static org.joni.Config.USE_SUNDAY_QUICK_SEARCH; import org.jcodings.Encoding; import org.jcodings.IntHolder; abstract class SearchAlgorithm { public abstract String getName(); public abstract int search(Matcher matcher, byte[]text, int textP, int textEnd, int textRange); public abstract int searchBackward(Matcher matcher, byte[]text, int textP, int adjustText, int textEnd, int textStart, int s_, int range_); public static final SearchAlgorithm NONE = new SearchAlgorithm() { public final String getName() { return "NONE"; } public final int search(Matcher matcher, byte[]text, int textP, int textEnd, int textRange) { return textP; } public final int searchBackward(Matcher matcher, byte[]text, int textP, int adjustText, int textEnd, int textStart, int s_, int range_) { return textP; } }; public static final SearchAlgorithm SLOW = new SearchAlgorithm() { public final String getName() { return "EXACT"; } public final int search(Matcher matcher, byte[]text, int textP, int textEnd, int textRange) { Regex regex = matcher.regex; Encoding enc = regex.enc; byte[]target = regex.exact; int targetP = regex.exactP; int targetEnd = regex.exactEnd; int end = textEnd; end -= targetEnd - targetP - 1; if (end > textRange) end = textRange; int s = textP; while (s < end) { if (text[s] == target[targetP]) { int p = s + 1; int t = targetP + 1; while (t < targetEnd) { if (target[t] != text[p++]) break; t++; } if (t == targetEnd) return s; } s += enc.length(text, s, textEnd); } return -1; } public final int searchBackward(Matcher matcher, byte[]text, int textP, int adjustText, int textEnd, int textStart, int s_, int range_) { Regex regex = matcher.regex; Encoding enc = regex.enc; byte[]target = regex.exact; int targetP = regex.exactP; int targetEnd = regex.exactEnd; int s = textEnd; s -= targetEnd - targetP; if (s > textStart) { s = textStart; } else { s = enc.leftAdjustCharHead(text, adjustText, s, textEnd); } while (s >= textP) { if (text[s] == target[targetP]) { 
int p = s + 1; int t = targetP + 1; while (t < targetEnd) { if (target[t] != text[p++]) break; t++; } if (t == targetEnd) return s; } s = enc.prevCharHead(text, adjustText, s, textEnd); } return -1; } }; public static final SearchAlgorithm SLOW_SB = new SearchAlgorithm() { public final String getName() { return "EXACT_SB"; } public final int search(Matcher matcher, byte[]text, int textP, int textEnd, int textRange) { Regex regex = matcher.regex; byte[]target = regex.exact; int targetP = regex.exactP; int targetEnd = regex.exactEnd; int end = textEnd; end -= targetEnd - targetP - 1; if (end > textRange) end = textRange; int s = textP; while (s < end) { if (text[s] == target[targetP]) { int p = s + 1; int t = targetP + 1; while (t < targetEnd) { if (target[t] != text[p++]) break; t++; } if (t == targetEnd) return s; } s++; } return -1; } public final int searchBackward(Matcher matcher, byte[]text, int textP, int adjustText, int textEnd, int textStart, int s_, int range_) { Regex regex = matcher.regex; byte[]target = regex.exact; int targetP = regex.exactP; int targetEnd = regex.exactEnd; int s = textEnd; s -= targetEnd - targetP; if (s > textStart) s = textStart; while (s >= textP) { if (text[s] == target[targetP]) { int p = s + 1; int t = targetP + 1; while (t < targetEnd) { if (target[t] != text[p++]) break; t++; } if (t == targetEnd) return s; } //s = s <= adjustText ? 
-1 : s - 1; s } return -1; } }; public static final SearchAlgorithm SLOW_IC = new SearchAlgorithm() { public final String getName() { return "EXACT_IC"; } public final int search(Matcher matcher, byte[]text, int textP, int textEnd, int textRange) { Regex regex = matcher.regex; Encoding enc = regex.enc; byte[]target = regex.exact; int targetP = regex.exactP; int targetEnd = regex.exactEnd; int end = textEnd; end -= targetEnd - targetP - 1; if (end > textRange) end = textRange; int s = textP; byte[]buf = matcher.icbuf(); while (s < end) { if (lowerCaseMatch(target, targetP, targetEnd, text, s, textEnd, enc, buf, regex.caseFoldFlag)) return s; s += enc.length(text, s, textEnd); } return -1; } public final int searchBackward(Matcher matcher, byte[]text, int textP, int adjustText, int textEnd, int textStart, int s_, int range_) { Regex regex = matcher.regex; Encoding enc = regex.enc; byte[]target = regex.exact; int targetP = regex.exactP; int targetEnd = regex.exactEnd; int s = textEnd; s -= targetEnd - targetP; if (s > textStart) { s = textStart; } else { s = enc.leftAdjustCharHead(text, adjustText, s, textEnd); } byte[]buf = matcher.icbuf(); while (s >= textP) { if (lowerCaseMatch(target, targetP, targetEnd, text, s, textEnd, enc, buf, regex.caseFoldFlag)) return s; s = enc.prevCharHead(text, adjustText, s, textEnd); } return -1; } private boolean lowerCaseMatch(byte[]t, int tP, int tEnd, byte[]bytes, int p, int end, Encoding enc, byte[]buf, int caseFoldFlag) { final IntHolder holder = new IntHolder(); holder.value = p; while (tP < tEnd) { int lowlen = enc.mbcCaseFold(caseFoldFlag, bytes, holder, end, buf); if (lowlen == 1) { if (t[tP++] != buf[0]) return false; } else { int q = 0; while (lowlen > 0) { if (t[tP++] != buf[q++]) return false; lowlen } } } return true; } }; public static final SearchAlgorithm SLOW_IC_SB = new SearchAlgorithm() { public final String getName() { return "EXACT_IC_SB"; } public final int search(Matcher matcher, byte[]text, int textP, int 
textEnd, int textRange) { Regex regex = matcher.regex; final byte[]toLowerTable = regex.enc.toLowerCaseTable(); byte[]target = regex.exact; int targetP = regex.exactP; int targetEnd = regex.exactEnd; int end = textEnd; end -= targetEnd - targetP - 1; if (end > textRange) end = textRange; int s = textP; while (s < end) { if (target[targetP] == toLowerTable[text[s] & 0xff]) { int p = s + 1; int t = targetP + 1; while (t < targetEnd) { if (target[t] != toLowerTable[text[p++] & 0xff]) break; t++; } if (t == targetEnd) return s; } s++; } return -1; } public final int searchBackward(Matcher matcher, byte[]text, int textP, int adjustText, int textEnd, int textStart, int s_, int range_) { Regex regex = matcher.regex; final byte[]toLowerTable = regex.enc.toLowerCaseTable(); byte[]target = regex.exact; int targetP = regex.exactP; int targetEnd = regex.exactEnd; int s = textEnd; s -= targetEnd - targetP; if (s > textStart) s = textStart; while (s >= textP) { if (target[targetP] == toLowerTable[text[s] & 0xff]) { int p = s + 1; int t = targetP + 1; while (t < targetEnd) { if (target[t] != toLowerTable[text[p++] & 0xff]) break; t++; } if (t == targetEnd) return s; } //s = s <= adjustText ? 
-1 : s - 1; s } return -1; } }; public static final SearchAlgorithm BM = new SearchAlgorithm() { public final String getName() { return "EXACT_BM"; } public final int search(Matcher matcher, byte[]text, int textP, int textEnd, int textRange) { Regex regex = matcher.regex; byte[]target = regex.exact; int targetP = regex.exactP; int targetEnd = regex.exactEnd; if (Config.DEBUG_SEARCH) Config.log.println("bm_search: text: " + textP + ", text_end: " + textEnd + ", text_range: " + textRange); int end, s; int tail = targetEnd - 1; if (USE_SUNDAY_QUICK_SEARCH) { int tlen1 = tail - targetP; end = textRange + tlen1; s = textP + tlen1; } else { end = textRange + (targetEnd - targetP) - 1; s = textP + (targetEnd - targetP) - 1; } if (end > textEnd) end = textEnd; if (regex.intMap == null) { while (s < end) { int p = s; int t = tail; while (text[p] == target[t]) { if (t == targetP) return p; p } if (USE_SUNDAY_QUICK_SEARCH && (s + 1 >= end)) break; s += regex.map[text[USE_SUNDAY_QUICK_SEARCH ? s + 1 : s] & 0xff]; } } else { /* see int_map[] */ while (s < end) { int p = s; int t = tail; while (text[p] == target[t]) { if (t == targetP) return p; p } if (USE_SUNDAY_QUICK_SEARCH && (s + 1 >= end)) break; s += regex.intMap[text[USE_SUNDAY_QUICK_SEARCH ? 
s + 1 : s] & 0xff]; } } return -1; } private static final int BM_BACKWARD_SEARCH_LENGTH_THRESHOLD = 100; public final int searchBackward(Matcher matcher, byte[]text, int textP, int adjustText, int textEnd, int textStart, int s_, int range_) { Regex regex = matcher.regex; Encoding enc = regex.enc; byte[]target = regex.exact; int targetP = regex.exactP; int targetEnd = regex.exactEnd; if (regex.intMapBackward == null) { if (s_ - range_ < BM_BACKWARD_SEARCH_LENGTH_THRESHOLD) { // goto exact_method; return SLOW.searchBackward(matcher, text, textP, adjustText, textEnd, textStart, s_, range_); } setBmBackwardSkip(regex, target, targetP, targetEnd); } int s = textEnd - (targetEnd - targetP); if (textStart < s) { s = textStart; } else { s = enc.leftAdjustCharHead(text, adjustText, s, textEnd); } while (s >= textP) { int p = s; int t = targetP; while (t < targetEnd && text[p] == target[t]) { p++; t++; } if (t == targetEnd) return s; s -= regex.intMapBackward[text[s] & 0xff]; s = enc.leftAdjustCharHead(text, adjustText, s, textEnd); } return -1; } private void setBmBackwardSkip(Regex regex, byte[]bytes, int p, int end) { final int[] skip; if (regex.intMapBackward == null) { regex.intMapBackward = skip = new int[Config.CHAR_TABLE_SIZE]; } else { skip = regex.intMapBackward; } int len = end - p; for (int i = 0; i < Config.CHAR_TABLE_SIZE; i++) skip[i] = len; for (int i = len - 1; i > 0; i--) skip[bytes[i] & 0xff] = i; } }; public static final SearchAlgorithm BM_IC = new SearchAlgorithm() { public final String getName() { return "EXACT_BM_IC"; } public final int search(Matcher matcher, byte[]text, int textP, int textEnd, int textRange) { return textP; } public final int searchBackward(Matcher matcher, byte[]text, int textP, int adjustText, int textEnd, int textStart, int s_, int range_) { return textP; } }; public static final SearchAlgorithm BM_NOT_REV = new SearchAlgorithm() { public final String getName() { return "EXACT_BM_NOT_REV"; } public final int search(Matcher 
matcher, byte[]text, int textP, int textEnd, int textRange) { Regex regex = matcher.regex; Encoding enc = regex.enc; byte[]target = regex.exact; int targetP = regex.exactP; int targetEnd = regex.exactEnd; int tail = targetEnd - 1; int tlen1 = tail - targetP; int end = textRange; if (Config.DEBUG_SEARCH) Config.log.println("bm_search_notrev: text: " + textP + ", text_end: " + textEnd + ", text_range: " + textRange); if (end + tlen1 > textEnd) end = textEnd - tlen1; int s = textP; if (regex.intMap == null) { while (s < end) { int p, se; p = se = s + tlen1; int t = tail; while (text[p] == target[t]) { if (t == targetP) return s; p } if (USE_SUNDAY_QUICK_SEARCH && (s + 1 >= end)) break; int skip = regex.map[text[USE_SUNDAY_QUICK_SEARCH ? se + 1 : se] & 0xff]; t = s; do { s += enc.length(text, s, textEnd); } while ((s - t) < skip && s < end); } } else { while (s < end) { int p, se; p = se = s + tlen1; int t = tail; while (text[p] == target[t]) { if (t == targetP) return s; p } if (USE_SUNDAY_QUICK_SEARCH && (s + 1 >= end)) break; int skip = regex.intMap[text[USE_SUNDAY_QUICK_SEARCH ? 
se + 1 : se] & 0xff]; t = s; do { s += enc.length(text, s, textEnd); } while ((s - t) < skip && s < end); } } return -1; } public final int searchBackward(Matcher matcher, byte[]text, int textP, int adjustText, int textEnd, int textStart, int s_, int range_) { return BM.searchBackward(matcher, text, textP, adjustText, textEnd, textStart, s_, range_); } }; public static final SearchAlgorithm MAP = new SearchAlgorithm() { public final String getName() { return "MAP"; } // TODO: check 1.9 inconsistent calls to map_search public final int search(Matcher matcher, byte[]text, int textP, int textEnd, int textRange) { Regex regex = matcher.regex; Encoding enc = regex.enc; byte[]map = regex.map; int s = textP; while (s < textRange) { if (map[text[s] & 0xff] != 0) return s; s += enc.length(text, s, textEnd); } return -1; } public final int searchBackward(Matcher matcher, byte[]text, int textP, int adjustText, int textEnd, int textStart, int s_, int range_) { Regex regex = matcher.regex; Encoding enc = regex.enc; byte[]map = regex.map; int s = textStart; if (s >= textEnd) s = textEnd - 1; // multibyte safe ? while (s >= textP) { if (map[text[s] & 0xff] != 0) return s; s = enc.prevCharHead(text, adjustText, s, textEnd); } return -1; } }; public static final SearchAlgorithm MAP_SB = new SearchAlgorithm() { public final String getName() { return "MAP_SB"; } public final int search(Matcher matcher, byte[]text, int textP, int textEnd, int textRange) { Regex regex = matcher.regex; byte[]map = regex.map; int s = textP; while (s < textRange) { if (map[text[s] & 0xff] != 0) return s; s++; } return -1; } public final int searchBackward(Matcher matcher, byte[]text, int textP, int adjustText, int textEnd, int textStart, int s_, int range_) { Regex regex = matcher.regex; byte[]map = regex.map; int s = textStart; if (s >= textEnd) s = textEnd - 1; while (s >= textP) { if (map[text[s] & 0xff] != 0) return s; s } return -1; } }; }
// $Id: PlaceManager.java,v 1.29 2002/04/17 22:02:08 mdb Exp $

package com.threerings.crowd.server;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Properties;

import com.threerings.presents.dobj.DObjectManager;
import com.threerings.presents.dobj.MessageEvent;
import com.threerings.presents.dobj.MessageListener;
import com.threerings.presents.dobj.ObjectAddedEvent;
import com.threerings.presents.dobj.ObjectDeathListener;
import com.threerings.presents.dobj.ObjectDestroyedEvent;
import com.threerings.presents.dobj.ObjectRemovedEvent;
import com.threerings.presents.dobj.OidListListener;

import com.threerings.crowd.Log;
import com.threerings.crowd.data.BodyObject;
import com.threerings.crowd.data.OccupantInfo;
import com.threerings.crowd.data.PlaceConfig;
import com.threerings.crowd.data.PlaceObject;

/**
 * The place manager is the server-side entity that handles all
 * place-related interaction. It subscribes to the place object and reacts
 * to message and other events. Behavior specific to a place (or class of
 * places) should live in the place manager. An intelligently constructed
 * hierarchy of place manager classes working in concert with invocation
 * services should provide the majority of the server-side functionality
 * of an application built on the Presents platform.
 *
 * <p> The base place manager class takes care of the necessary
 * interactions with the place registry to manage place registration. It
 * handles the place-related component of chatting. It also provides the
 * basis for place-based access control.
 *
 * <p> A derived class is expected to handle initialization, cleanup and
 * operational functionality via the calldown functions {@link #didInit},
 * {@link #didStartup}, and {@link #didShutdown} as well as through event
 * listeners.
 */
public class PlaceManager
    implements MessageListener, OidListListener, ObjectDeathListener
{
    /**
     * An interface used to allow the registration of standard message
     * handlers to be invoked by the place manager when particular types
     * of message events are received.
     */
    public static interface MessageHandler
    {
        /**
         * Invokes this message handler on the supplied event.
         *
         * @param event the message event received.
         * @param pmgr the place manager for which the message is being
         * handled.
         */
        public void handleEvent (MessageEvent event, PlaceManager pmgr);
    }

    /**
     * Returns a reference to our place configuration object.
     */
    public PlaceConfig getConfig ()
    {
        return _config;
    }

    /**
     * Returns the place object managed by this place manager.
     *
     * <p> NOTE(review): this is null until {@link #startup} has been
     * called — confirm callers account for that window.
     */
    public PlaceObject getPlaceObject ()
    {
        return _plobj;
    }

    /**
     * A place manager derived class is likely to have a corresponding
     * derived class of {@link com.threerings.crowd.data.PlaceObject} that
     * it will be managing. Derived classes should override this method
     * and return the class object for the place object derived class they
     * desire to use. The place registry will use this method to create
     * the proper place object during the place creation process.
     *
     * @return the class of the class, derived from {@link PlaceObject},
     * that this manager wishes to manage.
     *
     * @see PlaceRegistry#createPlace
     */
    protected Class getPlaceObjectClass ()
    {
        return PlaceObject.class;
    }

    /**
     * Called by the place registry after creating this place manager.
     *
     * @param registry the registry with which this manager is registered.
     * @param config the configuration for this place.
     * @param omgr the distributed object manager in use.
     */
    public void init (
        PlaceRegistry registry, PlaceConfig config, DObjectManager omgr)
    {
        _registry = registry;
        _config = config;
        _omgr = omgr;

        // let derived classes do initialization stuff
        didInit();
    }

    /**
     * Called after this place manager has been initialized with its
     * configuration information but before it has been started up with
     * its place object reference. Derived classes can override this
     * function and perform any basic initialization that they desire.
     * They should of course be sure to call <code>super.didInit()</code>.
     */
    protected void didInit ()
    {
        // initialize our delegates
        applyToDelegates(new DelegateOp() {
            public void apply (PlaceManagerDelegate delegate) {
                delegate.didInit(_config);
            }
        });
    }

    /**
     * Called by the place manager after the place object has been
     * successfully created.
     *
     * @param plobj the place object this manager will manage; also
     * registered as the target of our event listeners.
     */
    public void startup (PlaceObject plobj)
    {
        // keep track of this
        _plobj = plobj;

        // we'll need to hear about place object events
        plobj.addListener(this);

        // let our derived classes do their thang
        didStartup();
    }

    /**
     * Derived classes should override this (and be sure to call
     * <code>super.didStartup()</code>) to perform any startup time
     * initialization. The place object will be available by the time this
     * method is executed.
     */
    protected void didStartup ()
    {
        // let our delegates know that we've started up
        applyToDelegates(new DelegateOp() {
            public void apply (PlaceManagerDelegate delegate) {
                delegate.didStartup(_plobj);
            }
        });
    }

    /**
     * Causes the place object being managed by this place manager to be
     * destroyed and the place manager to shut down.
     */
    public void shutdown ()
    {
        // destroy the object and everything will follow from that
        // (objectDestroyed() will unmap us and call didShutdown())
        CrowdServer.omgr.destroyObject(_plobj.getOid());
    }

    /**
     * Called when this place has been destroyed and the place manager has
     * shut down (via a call to {@link #shutdown}). Derived classes can
     * override this method and perform any necessary shutdown time
     * processing.
     */
    protected void didShutdown ()
    {
        // let our delegates know that we've shut down
        applyToDelegates(new DelegateOp() {
            public void apply (PlaceManagerDelegate delegate) {
                delegate.didShutdown();
            }
        });
    }

    /**
     * Returns the appropriate derived class of {@link OccupantInfo} that
     * will be used to provide occupant info for this body. An occupant
     * info record is created when a body enters a place.
     *
     * @param body the body that is entering the place and for whom we are
     * creating an occupant info record.
     */
    protected Class getOccupantInfoClass (BodyObject body)
    {
        return OccupantInfo.class;
    }

    /**
     * Builds an occupant info record for the specified body object. This
     * is called by the location services when a body enters a place. It
     * should not be overridden by derived classes, they should override
     * {@link #populateOccupantInfo}, which is set up for that sort of
     * thing.
     *
     * @return the populated occupant info record, or null if the record
     * could not be instantiated (the failure is logged, not thrown).
     */
    public OccupantInfo buildOccupantInfo (BodyObject body)
    {
        // create a new occupant info instance
        try {
            OccupantInfo info = (OccupantInfo)
                getOccupantInfoClass(body).newInstance();
            populateOccupantInfo(info, body);
            return info;

        } catch (Exception e) {
            Log.warning("Failure building occupant info " +
                        "[body=" + body + "].");
            Log.logStackTrace(e);
            return null;
        }
    }

    /**
     * Derived classes should override this method if they are making use
     * of a derived occupant info class. They should call the super
     * implementation and then populate the occupant info fields in their
     * extended object.
     */
    protected void populateOccupantInfo (OccupantInfo info, BodyObject body)
    {
        // the base occupant info is only their username
        info.bodyOid = new Integer(body.getOid());
        info.username = body.username;
    }

    /**
     * Called when a body object enters this place.
     */
    protected void bodyEntered (final int bodyOid)
    {
        Log.info("Body entered [ploid=" + _plobj.getOid() +
                 ", oid=" + bodyOid + "].");

        // let our delegates know what's up
        applyToDelegates(new DelegateOp() {
            public void apply (PlaceManagerDelegate delegate) {
                delegate.bodyEntered(bodyOid);
            }
        });
    }

    /**
     * Called when a body object leaves this place.
     */
    protected void bodyLeft (final int bodyOid)
    {
        Log.info("Body left [ploid=" + _plobj.getOid() +
                 ", oid=" + bodyOid + "].");

        // if their occupant info hasn't been removed (which may be the
        // case if they logged off rather than left via a MoveTo request),
        // we need to get it on out of here
        Object key = new Integer(bodyOid);
        if (_plobj.occupantInfo.containsKey(key)) {
            _plobj.removeFromOccupantInfo(key);
        }

        // let our delegates know what's up
        applyToDelegates(new DelegateOp() {
            public void apply (PlaceManagerDelegate delegate) {
                delegate.bodyLeft(bodyOid);
            }
        });

        // if that leaves us with zero occupants, maybe do something
        if (_plobj.occupants.size() == 0) {
            placeBecameEmpty();
        }
    }

    /**
     * Called when we transition from having bodies in the place to not
     * having any bodies in the place. Some places may take this as a sign
     * to pack it in, others may wish to stick around. In any case, they
     * can override this method to do their thing.
     */
    protected void placeBecameEmpty ()
    {
        // let our delegates know what's up
        applyToDelegates(new DelegateOp() {
            public void apply (PlaceManagerDelegate delegate) {
                delegate.placeBecameEmpty();
            }
        });
    }

    /**
     * Registers a particular message handler instance to be used when
     * processing message events with the specified name.
     *
     * @param name the message name of the message events that should be
     * handled by this handler.
     * @param handler the handler to be registered.
     */
    public void registerMessageHandler (String name, MessageHandler handler)
    {
        // create our handler map if necessary (lazily, since many places
        // may never register a handler)
        if (_msghandlers == null) {
            _msghandlers = new HashMap();
        }
        _msghandlers.put(name, handler);
    }

    /**
     * Dispatches message events to registered message handlers. Derived
     * classes should probably register message handlers rather than
     * override this method directly.
     */
    public void messageReceived (MessageEvent event)
    {
        MessageHandler handler = null;
        if (_msghandlers != null) {
            handler = (MessageHandler)_msghandlers.get(event.getName());
        }
        if (handler != null) {
            handler.handleEvent(event, this);
        }
        // unrecognized messages are silently ignored
    }

    /**
     * Handles occupant arrival into the place. Derived classes may need
     * to override this method to handle other oid lists in their derived
     * place objects. They should be sure to call
     * <code>super.objectAdded</code> if the event is one they don't
     * explicitly handle.
     */
    public void objectAdded (ObjectAddedEvent event)
    {
        if (event.getName().equals(PlaceObject.OCCUPANTS)) {
            bodyEntered(event.getOid());
        }
    }

    /**
     * Handles occupant departure from the place. Derived classes may need
     * to override this method to handle other oid lists in their derived
     * place objects. They should be sure to call
     * <code>super.objectRemoved</code> if the event is one they don't
     * explicitly handle.
     */
    public void objectRemoved (ObjectRemovedEvent event)
    {
        if (event.getName().equals(PlaceObject.OCCUPANTS)) {
            bodyLeft(event.getOid());
        }
    }

    /**
     * Handles place destruction. We shut ourselves down and ask the place
     * registry to unmap us.
     */
    public void objectDestroyed (ObjectDestroyedEvent event)
    {
        // unregister ourselves
        _registry.unmapPlaceManager(this);

        // let our derived classes and delegates shut themselves down
        didShutdown();
    }

    /**
     * Generates a string representation of this manager. Does so in a way
     * that makes it easier for derived classes to add to the string
     * representation.
     *
     * @see #toString(StringBuffer)
     */
    public String toString ()
    {
        StringBuffer buf = new StringBuffer();
        buf.append("[");
        toString(buf);
        buf.append("]");
        return buf.toString();
    }

    /**
     * An extensible way to add to the string representation of this
     * class. Override this (being sure to call super) and append your
     * info to the buffer.
     */
    protected void toString (StringBuffer buf)
    {
        buf.append("place=").append(_plobj);
        buf.append(", config=").append(_config);
    }

    /**
     * Adds the supplied delegate to the list for this manager.
     */
    protected void addDelegate (PlaceManagerDelegate delegate)
    {
        // created lazily; most managers may have no delegates
        if (_delegates == null) {
            _delegates = new ArrayList();
        }
        _delegates.add(delegate);
    }

    /**
     * Used to call methods in delegates.
     */
    protected static interface DelegateOp
    {
        public void apply (PlaceManagerDelegate delegate);
    }

    /**
     * Applies the supplied operation to the registered delegates.
     */
    protected void applyToDelegates (DelegateOp op)
    {
        if (_delegates != null) {
            int dcount = _delegates.size();
            for (int i = 0; i < dcount; i++) {
                op.apply((PlaceManagerDelegate)_delegates.get(i));
            }
        }
    }

    /** A distributed object manager for doing dobj stuff. */
    protected DObjectManager _omgr;

    /** A reference to the place object that we manage. */
    protected PlaceObject _plobj;

    /** A reference to the configuration for our place. */
    protected PlaceConfig _config;

    /** A reference to the place registry with which we're registered. */
    protected PlaceRegistry _registry;

    /** Message handlers are used to process message events. */
    protected HashMap _msghandlers;

    /** A list of the delegates in use by this manager. */
    protected ArrayList _delegates;
}
package org.jruby.debug;

import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.Map;

import org.jruby.Ruby;
import org.jruby.RubyArray;
import org.jruby.RubyClass;
import org.jruby.RubyFixnum;
import org.jruby.RubyHash;
import org.jruby.RubyString;
import org.jruby.RubyThread;
import org.jruby.debug.DebugBreakpoint.Type;
import org.jruby.runtime.Arity;
import org.jruby.runtime.Block;
import org.jruby.runtime.builtin.IRubyObject;

/**
 * Central state of the JRuby debugger: manages the event hook, the
 * per-thread debug contexts, breakpoints and catchpoints. {@link #start}
 * and {@link #stop} are reference counted ({@code startCount}) so nested
 * start/stop pairs only install/remove the event hook once.
 */
final class Debugger {

    private DebugEventHook debugEventHook;
    /** Maps each Ruby thread to its debug Context wrapper. */
    private Map<RubyThread, Context> threadsTable;
    private IRubyObject breakpoints;
    private IRubyObject catchpoints;
    private boolean tracing;
    private boolean postMortem;
    private boolean keepFrameBinding;
    private boolean debug;
    private boolean trackFrameArgs;

    // one-entry cache of the most recently looked-up thread/context pair
    private IRubyObject lastContext;
    private IRubyObject lastThread;
    private boolean started;
    /** Nesting depth of start() calls; teardown happens when it drops to 0. */
    private int startCount;

    /** Used to for unique breakpoint ID for newly added breakpoints. */
    private int lastBreakpointID;

    private DebugContext lastDebugContext;

    /**
     * Starts the debugger (installs the event hook) unless it is already
     * running; always bumps the start count. When a block is given it is
     * yielded and the debugger is stopped afterwards.
     *
     * @return true if the debugger was actually started by this call,
     *         false if it was already running (block-less case).
     */
    IRubyObject start(IRubyObject recv, Block block) {
        Ruby runtime = recv.getRuntime();
        startCount++;
        IRubyObject result;

        if (started) {
            result = runtime.getFalse();
        } else {
            IRubyObject nil = runtime.getNil();
            lastThread = nil;
            started = true;
            setLastContext(nil);
            debugEventHook = new DebugEventHook(this, runtime);
            breakpoints = runtime.newArray();
            catchpoints = RubyHash.newHash(runtime);
            threadsTable = new IdentityHashMap<RubyThread, Context>();
            runtime.addEventHook(debugEventHook);
            result = runtime.getTrue();
        }

        if (block.isGiven()) {
            try {
                return block.yield(runtime.getCurrentContext(), recv);
            } finally {
                stop(runtime);
            }
        }
        return result;
    }

    /**
     * Balances one {@link #start} call. Only when the outermost start is
     * balanced does this remove the event hook and discard all state.
     *
     * @return true when the debugger was fully torn down.
     */
    boolean stop(final Ruby runtime) {
        checkStarted(runtime);
        startCount--; // was corrupted in source: the decrement is required for nesting
        if (startCount > 0) {
            return false;
        }

        runtime.tearDown(false);
        runtime.removeEventHook(debugEventHook);
        breakpoints = null;
        catchpoints = null;
        debugEventHook = null;
        started = false;
        threadsTable = null;
        return true;
    }

    /** see {@link RubyDebugger#debug_load} */
    void load(IRubyObject recv, IRubyObject[] args) {
        Ruby rt = recv.getRuntime();
        Arity.checkArgumentCount(rt, args, 1, 3);
        IRubyObject[] actual = Arity.scanArgs(rt, args, 1, 2);
        IRubyObject file = args[0];
        IRubyObject stop = actual[1];
        IRubyObject incrementStart = actual[2];

        start(recv, Block.NULL_BLOCK);
        if (!incrementStart.isTrue()) {
            startCount--; // undo the count bump from start() above
        }

        IRubyObject context = getCurrentContext(recv);
        DebugContext debugContext = (DebugContext) context.dataGetStruct();
        debugContext.clearFrames();

        if (stop.isTrue()) {
            debugContext.setStopNext(1);
        }

        try {
            RubyString fileText = file.convertToString();
            rt.getLoadService().load(fileText.getByteList().toString(), false);
        } finally {
            stop(rt);
        }
    }

    /** Returns the Context for the runtime's current thread. */
    IRubyObject getCurrentContext(IRubyObject recv) {
        checkStarted(recv);
        RubyThread thread = recv.getRuntime().getCurrentContext().getThread();
        return contextForThread(thread);
    }

    /** Returns the DebugContext for the runtime's current thread. */
    DebugContext getCurrentDebugContext(IRubyObject recv) {
        checkStarted(recv);
        RubyThread thread = recv.getRuntime().getCurrentContext().getThread();
        return threadContextLookup(thread, true).debugContext;
    }

    /**
     * Looks up (creating if necessary) the context pair for a thread,
     * consulting the one-entry {@code lastThread}/{@code lastContext}
     * cache first.
     *
     * @param wantDebugContext whether the debugContext field should also
     *        be populated in the returned pair.
     */
    DebugContextPair threadContextLookup(final RubyThread thread, final boolean wantDebugContext) {
        checkStarted(thread);

        DebugContextPair ctxs = new DebugContextPair();
        if (lastThread == thread && !lastContext.isNil()) {
            ctxs.context = (Context) lastContext;
            if (wantDebugContext) {
                ctxs.debugContext = lastDebugContext;
            }
            return ctxs;
        }

        synchronized (threadsTable) {
            ctxs.context = threadsTable.get(thread);
            if (ctxs.context == null) {
                ctxs.context = debugContextCreate(thread);
                threadsTable.put(thread, ctxs.context);
            }
        }

        DebugContext lDebugContext = (DebugContext) ctxs.context.dataGetStruct();
        if (wantDebugContext) {
            ctxs.debugContext = lDebugContext;
        }

        // refresh the cache
        lastThread = thread;
        setLastContext(ctxs.context);
        lastDebugContext = lDebugContext;
        return ctxs;
    }

    private Context contextForThread(final RubyThread thread) {
        return threadContextLookup(thread, false).context;
    }

    /** Calls {@link #checkStarted(Ruby)} with reciever's runtime. */
    void checkStarted(final IRubyObject recv) {
        checkStarted(recv.getRuntime());
    }

    /** @throws org.jruby.exceptions.RaiseException (RuntimeError) if not started. */
    void checkStarted(final Ruby runtime) {
        if (!started) {
            throw runtime.newRuntimeError("Debugger.start is not called yet.");
        }
    }

    /** Allocates a Debugger::Context wrapping a fresh DebugContext for the thread. */
    private Context debugContextCreate(RubyThread thread) {
        DebugContext debugContext = new DebugContext(thread);
        // debugger's own threads are ignored so they don't trigger events
        // if (thread.getType() == thread.getRuntime().getClass(DebuggerDef.DEBUG_THREAD_NAME)) {
        if (thread.getType().getName().equals("Debugger::" + RubyDebugger.DEBUG_THREAD_NAME)) {
            debugContext.setIgnored(true);
        }
        RubyClass cContext = thread.getRuntime().getModule("Debugger").getClass("Context");
        Context context = (Context) cContext.allocate();
        context.dataWrapStruct(debugContext);
        return context;
    }

    /**
     * Returns an array of contexts for all live threads, creating contexts
     * as needed and refreshing the threads table.
     */
    IRubyObject getDebugContexts(IRubyObject self) {
        checkStarted(self);
        RubyArray newList = self.getRuntime().newArray();
        RubyArray list = RubyThread.list(self);

        synchronized (threadsTable) {
            for (int i = 0; i < list.size(); i++) {
                RubyThread thread = (RubyThread) list.entry(i);
                Context context = contextForThread(thread);
                newList.add(context);
            }
            for (int i = 0; i < newList.size(); i++) {
                Context context = (Context) newList.entry(i);
                DebugContext debugContext = (DebugContext) context.dataGetStruct();
                threadsTable.put(debugContext.getThread(), context);
            }
        }
        return newList;
    }

    /** Suspends every thread except the current one. */
    void suspend(IRubyObject recv) {
        checkStarted(recv);
        for (Context context : getNonCurrentContexts(recv)) {
            context.suspend0();
        }
    }

    /** Resumes every thread except the current one. */
    void resume(IRubyObject recv) {
        checkStarted(recv);
        for (Context context : getNonCurrentContexts(recv)) {
            context.resume0();
        }
    }

    private @SuppressWarnings("unchecked") Iterable<Context> getNonCurrentContexts(final IRubyObject recv) {
        RubyArray contexts;
        Context current;

        synchronized (threadsTable) {
            contexts = (RubyArray) getDebugContexts(recv);
            RubyThread thread = recv.getRuntime().getCurrentContext().getThread();
            current = contextForThread(thread);
        }

        int len = contexts.getLength();
        for (int i = 0; i < len; i++) {
            Context context = (Context) contexts.entry(i);
            if (context == current) {
                contexts.remove(i);
                // the current context occurs at most once; stop so we don't
                // index past the end of the now-shorter array
                break;
            }
        }
        return contexts;
    }

    boolean isStarted() {
        return started;
    }

    void setTracing(boolean tracing) {
        this.tracing = tracing;
    }

    boolean isTracing() {
        return tracing;
    }

    void setKeepFrameBinding(boolean keepFrameBinding) {
        this.keepFrameBinding = keepFrameBinding;
    }

    boolean isKeepFrameBinding() {
        return keepFrameBinding;
    }

    boolean isTrackFrameArgs() {
        return trackFrameArgs;
    }

    IRubyObject getBreakpoints() {
        return breakpoints;
    }

    /** Creates a breakpoint from Ruby-level args, registers and returns it. */
    IRubyObject addBreakpoint(IRubyObject recv, IRubyObject[] args) {
        checkStarted(recv);
        IRubyObject result = createBreakpointFromArgs(recv, args, ++lastBreakpointID);
        ((RubyArray) breakpoints).add(result);
        return result;
    }

    /**
     * Removes the breakpoint with the given id.
     *
     * @return the removed breakpoint, or nil when no breakpoint has that id.
     */
    IRubyObject removeBreakpoint(IRubyObject recv, IRubyObject breakpointId) {
        checkStarted(recv);
        int id = RubyFixnum.fix2int(breakpointId);
        RubyArray breakpointsA = ((RubyArray) breakpoints);
        for (int i = 0; i < breakpointsA.size(); i++) {
            IRubyObject breakpoint = breakpointsA.entry(i);
            DebugBreakpoint debugBreakpoint = (DebugBreakpoint) breakpoint.dataGetStruct();
            if (debugBreakpoint.getId() == id) {
                breakpointsA.remove(i);
                return breakpoint;
            }
        }
        return Util.nil(recv);
    }

    IRubyObject createBreakpointFromArgs(IRubyObject recv, IRubyObject[] args) {
        return createBreakpointFromArgs(recv, args, ++lastBreakpointID);
    }

    /**
     * Builds a Debugger::Breakpoint from (source, pos[, expr]). A Fixnum
     * pos means a line-number breakpoint; anything else is treated as a
     * method-name breakpoint.
     */
    IRubyObject createBreakpointFromArgs(IRubyObject recv, IRubyObject[] args, int id) {
        Ruby rt = recv.getRuntime();
        IRubyObject expr;
        if (Arity.checkArgumentCount(rt, args, 2, 3) == 3) {
            expr = args[2];
        } else {
            expr = rt.getNil();
        }
        IRubyObject source = args[0];
        IRubyObject pos = args[1];

        Type type = pos instanceof RubyFixnum ? DebugBreakpoint.Type.POS : DebugBreakpoint.Type.METHOD;
        if (type == DebugBreakpoint.Type.POS) {
            source = source.asString();
        } else {
            pos = pos.asString();
        }
        DebugBreakpoint debugBreakpoint = new DebugBreakpoint();
        debugBreakpoint.setId(id);
        debugBreakpoint.setSource(source);
        debugBreakpoint.setType(type);
        if (type == DebugBreakpoint.Type.POS) {
            debugBreakpoint.getPos().setLine(RubyFixnum.num2int(pos));
        } else {
            debugBreakpoint.getPos().setMethodName(((RubyString) pos).toString());
        }
        debugBreakpoint.setExpr(expr.isNil() ? expr : (RubyString) expr);
        debugBreakpoint.setHitCount(0);
        debugBreakpoint.setHitValue(0);
        debugBreakpoint.setHitCondition(DebugBreakpoint.HitCondition.NONE);
        RubyClass cBreakpoint = rt.getModule("Debugger").getClass("Breakpoint");
        IRubyObject breakpoint = cBreakpoint.allocate();
        breakpoint.dataWrapStruct(debugBreakpoint);
        return breakpoint;
    }

    /**
     * Returns the context of the thread that was most recently stopped by
     * the event hook, or nil when none matches.
     */
    IRubyObject lastInterrupted(IRubyObject recv) {
        checkStarted(recv);
        IRubyObject result = Util.nil(recv);
        synchronized (threadsTable) {
            for (Map.Entry<RubyThread, Context> entry : threadsTable.entrySet()) {
                IRubyObject context = entry.getValue();
                DebugContext debugContext = (DebugContext) context.dataGetStruct();
                if (debugContext.getThnum() == debugEventHook.getLastDebuggedThnum()) {
                    result = context;
                    break;
                }
            }
        }
        return result;
    }

    /** Drops table entries for threads that have died. */
    void checkThreadContexts(Ruby runtime) {
        synchronized (threadsTable) {
            for (Iterator<Map.Entry<RubyThread, Context>> it = threadsTable.entrySet().iterator(); it.hasNext();) {
                Map.Entry<RubyThread, Context> entry = it.next();
                if (entry.getKey().alive_p().isFalse()) {
                    it.remove();
                }
            }
        }
    }

    /** Yields the block with event dispatch disabled for the current context. */
    IRubyObject skip(IRubyObject recv, Block block) {
        if (!block.isGiven()) {
            throw recv.getRuntime().newArgumentError("called without a block");
        }
        DebugContext context = getCurrentDebugContext(recv);
        try {
            context.setSkipped(true);
            return block.yield(recv.getRuntime().getCurrentContext(), recv.getRuntime().getNil());
        } finally {
            context.setSkipped(false); // always re-enable, even on raise
        }
    }

    boolean isPostMortem() {
        return postMortem;
    }

    void setPostMortem(boolean postMortem) {
        this.postMortem = postMortem;
    }

    boolean isDebug() {
        return debug;
    }

    void setDebug(boolean debug) {
        this.debug = debug;
    }

    /** TODO: Get rid of me - here because of hard rewrite from C. */
    static final class DebugContextPair {
        Context context;
        DebugContext debugContext;
    }

    private void setLastContext(IRubyObject value) {
        lastContext = value;
    }

    void setTrackFrameArgs(boolean trackFrameArgs) {
        this.trackFrameArgs = trackFrameArgs;
    }

    RubyHash getCatchpoints() {
        return (RubyHash) catchpoints;
    }

    /**
     * Registers a catchpoint (exception class name to catch). A nil
     * argument resets catchpoints to nil; a non-String raises TypeError.
     */
    void addCatchpoint(IRubyObject recv, IRubyObject catchpoint) {
        Ruby runtime = recv.getRuntime();
        checkStarted(recv);
        if (catchpoint.isNil()) {
            this.catchpoints = runtime.getNil();
        } else {
            if (!runtime.getString().isInstance(catchpoint)) {
                throw runtime.newTypeError("value of checkpoint must be String");
            }
            getCatchpoints().op_aset(runtime.getCurrentContext(), catchpoint.dup(), RubyFixnum.zero(runtime));
        }
    }
}
// $Id: PlaceManager.java,v 1.15 2001/10/09 17:18:44 mdb Exp $

package com.threerings.cocktail.party.server;

import java.util.HashMap;
import java.util.Properties;

import com.threerings.cocktail.cher.dobj.*;
import com.threerings.cocktail.party.Log;
import com.threerings.cocktail.party.data.*;

/**
 * The place manager is the server-side entity that handles all
 * place-related interaction. It subscribes to the place object and reacts
 * to message and other events. Behavior specific to a place (or class of
 * places) should live in the place manager. An intelligently constructed
 * hierarchy of place manager classes working in concert with invocation
 * services should provide the majority of the server-side functionality
 * of an application built on the Cocktail platform.
 *
 * <p> The base place manager class takes care of the necessary
 * interactions with the place registry to manage place registration. It
 * handles the place-related component of chatting. It also provides the
 * basis for place-based access control.
 *
 * <p> A derived class is expected to handle initialization, cleanup and
 * operational functionality via the calldown functions {@link
 * #didStartup}, {@link #willShutdown}, and {@link #didShutdown} as well
 * as through additions to {@link #handleEvent}.
 */
public class PlaceManager implements Subscriber
{
    /**
     * Returns a reference to our place configuration object.
     */
    public PlaceConfig getConfig ()
    {
        return _config;
    }

    /**
     * Returns the place object managed by this place manager.
     */
    public PlaceObject getPlaceObject ()
    {
        return _plobj;
    }

    /**
     * A place manager derived class is likely to have a corresponding
     * derived class of {@link PlaceObject} that it will be managing.
     * Derived classes should override this method and return the class
     * object for the place object derived class they desire to use. The
     * place registry will use this method to create the proper place
     * object during the place creation process.
     *
     * @return the class of the class, derived from {@link PlaceObject},
     * that this manager wishes to manage.
     *
     * @see PlaceRegistry#createPlace
     */
    protected Class getPlaceObjectClass ()
    {
        return PlaceObject.class;
    }

    /**
     * Called by the place registry after creating this place manager.
     */
    public void init (PlaceRegistry registry, PlaceConfig config)
    {
        _registry = registry;
        _config = config;

        // let derived classes do initialization stuff
        didInit();
    }

    /**
     * Called after this place manager has been initialized with its
     * configuration information but before it has been started up with
     * its place object reference. Derived classes can override this
     * function and perform any basic initialization that they desire.
     * They should of course be sure to call <code>super.didInit()</code>.
     */
    protected void didInit ()
    {
    }

    /**
     * Called by the place manager after the place object has been
     * successfully created.
     */
    public void startup (PlaceObject plobj)
    {
        // keep track of this
        _plobj = plobj;

        // configure the occupant info set
        plobj.occupantInfo.setElementType(getOccupantInfoClass());

        // we'll want to be included among the place object's subscribers;
        // we know that we can call addSubscriber() directly because the
        // place manager is doing all of our initialization on the dobjmgr
        // thread
        plobj.addSubscriber(this);

        // let our derived classes do their thang
        didStartup();
    }

    /**
     * Derived classes should override this (and be sure to call
     * <code>super.didStartup()</code>) to perform any startup time
     * initialization. The place object will be available by the time this
     * method is executed.
     */
    protected void didStartup ()
    {
    }

    // not called at present but will eventually be part of the shutdown
    // and cleanup process
    protected void willShutdown ()
    {
    }

    // not called at present but will eventually be part of the shutdown
    // and cleanup process
    protected void didShutdown ()
    {
    }

    /**
     * When the manager starts up, it configures its place object occupant
     * info set by setting the type of occupant info objects it will
     * contain. Managers that wish to use derived occupant info classes
     * should override this function and return a reference to their
     * derived class.
     */
    protected Class getOccupantInfoClass ()
    {
        return OccupantInfo.class;
    }

    /**
     * Builds an occupant info record for the specified body object. This
     * is called by the location services when a body enters a place. It
     * should not be overridden by derived classes, they should override
     * {@link #populateOccupantInfo}, which is set up for that sort of
     * thing.
     */
    public OccupantInfo buildOccupantInfo (BodyObject body)
    {
        // create a new occupant info instance
        try {
            // reflective instantiation requires a public no-arg constructor
            // on the occupant info class
            OccupantInfo info = (OccupantInfo)
                getOccupantInfoClass().newInstance();
            populateOccupantInfo(info, body);
            return info;

        } catch (Exception e) {
            // failures are logged and reported to the caller as null rather
            // than propagated
            Log.warning("Failure building occupant info " +
                        "[body=" + body + "].");
            Log.logStackTrace(e);
            return null;
        }
    }

    /**
     * Derived classes should override this method if they are making use
     * of a derived occupant info class. They should call the super
     * implementation and then populate the occupant info fields in their
     * extended object.
     */
    protected void populateOccupantInfo (OccupantInfo info, BodyObject body)
    {
        // the base occupant info is only their username
        info.bodyOid = new Integer(body.getOid());
        info.username = body.username;
    }

    /**
     * Called when a body object enters this place.
     */
    protected void bodyEntered (int bodyOid)
    {
        Log.info("Body entered [ploid=" + _plobj.getOid() +
                 ", oid=" + bodyOid + "].");
    }

    /**
     * Called when a body object leaves this place.
     */
    protected void bodyLeft (int bodyOid)
    {
        Log.info("Body left [ploid=" + _plobj.getOid() +
                 ", oid=" + bodyOid + "].");

        // if their occupant info hasn't been removed (which may be the
        // case if they logged off rather than left via a MoveTo request),
        // we need to get it on out of here
        Object key = new Integer(bodyOid);
        if (_plobj.occupantInfo.containsKey(key)) {
            _plobj.removeFromOccupantInfo(key);
        }
    }

    /**
     * Registers a particular message handler instance to be used when
     * processing message events with the specified name.
     *
     * @param name the message name of the message events that should be
     * handled by this handler.
     * @param handler the handler to be registered.
     */
    public void registerMessageHandler (String name, MessageHandler handler)
    {
        // create our handler map if necessary
        // NOTE(review): lazy creation is unsynchronized; presumably safe
        // because registration happens on the dobjmgr thread — confirm
        if (_msghandlers == null) {
            _msghandlers = new HashMap();
        }
        _msghandlers.put(name, handler);
    }

    // nothing doing
    public void objectAvailable (DObject object)
    {
    }

    // nothing doing
    public void requestFailed (int oid, ObjectAccessException cause)
    {
    }

    /**
     * Derived classes can override this to handle events, but they must
     * be sure to pass unknown events up to their super class.
     */
    public boolean handleEvent (DEvent event, DObject target)
    {
        // if this is a message event, see if we have a handler for it
        if (event instanceof MessageEvent) {
            MessageEvent mevt = (MessageEvent)event;
            MessageHandler handler = null;
            if (_msghandlers != null) {
                handler = (MessageHandler)_msghandlers.get(mevt.getName());
            }
            if (handler != null) {
                handler.handleEvent(mevt, (PlaceObject)target);
                return true;
            }

        } else if (event instanceof ObjectAddedEvent) {
            // an addition to the occupants set means a body entered
            ObjectAddedEvent oae = (ObjectAddedEvent)event;
            if (oae.getName().equals(PlaceObject.OCCUPANTS)) {
                bodyEntered(oae.getOid());
                return true;
            }

        } else if (event instanceof ObjectRemovedEvent) {
            // a removal from the occupants set means a body left
            ObjectRemovedEvent ore = (ObjectRemovedEvent)event;
            if (ore.getName().equals(PlaceObject.OCCUPANTS)) {
                bodyLeft(ore.getOid());
                return true;
            }
        }

        // unhandled events are logged, but we still report them as handled
        Log.warning("Unhandled place event " + event + ".");
        return true;
    }

    /**
     * Generates a string representation of this manager. Does so in a way
     * that makes it easier for derived classes to add to the string
     * representation.
     *
     * @see #toString(StringBuffer)
     */
    public String toString ()
    {
        StringBuffer buf = new StringBuffer();
        buf.append("[");
        toString(buf);
        buf.append("]");
        return buf.toString();
    }

    /**
     * An extensible way to add to the string representation of this
     * class. Override this (being sure to call super) and append your
     * info to the buffer.
     */
    protected void toString (StringBuffer buf)
    {
        buf.append("place=").append(_plobj);
        buf.append(", config=").append(_config);
    }

    /**
     * An interface used to allow the registration of standard message
     * handlers to be invoked by the place manager when particular types
     * of message events are received.
     */
    protected static interface MessageHandler
    {
        /**
         * Invokes this message handler on the supplied event.
         *
         * @param event the message event received.
         * @param target the place object on which the message event was
         * received.
         */
        public void handleEvent (MessageEvent event, PlaceObject target);
    }

    /** A reference to the place object that we manage. */
    protected PlaceObject _plobj;

    /** A reference to the configuration for our place. */
    protected PlaceConfig _config;

    /** A reference to the place registry with which we're registered. */
    protected PlaceRegistry _registry;

    /** Message handlers are used to process message events. */
    protected HashMap _msghandlers;
}
/*
 * $Id: AuUtil.java,v 1.41 2013-10-16 23:17:06 fergaloy-sf Exp $
 */

package org.lockss.plugin;

import java.net.*;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
import org.lockss.app.*;
import org.lockss.config.*;
import org.lockss.util.*;
import org.lockss.daemon.*;
import org.lockss.jetty.CuResourceHandler;
import org.lockss.crawler.*;
import org.lockss.state.*;
import org.lockss.poller.*;
import org.lockss.repository.*;
import org.lockss.plugin.definable.*;
import org.lockss.plugin.exploded.*;

/**
 * Static AU- and plugin-related utility methods.  These might logically
 * belong in either Plugin or ArchivalUnit, but they are defined entirely
 * in terms of already-public methods, so need not be implemented by plugin
 * writers, thus there's no need to muddy those interfaces.
 */
public class AuUtil {
  private static Logger log = Logger.getLogger("AuUtil");

  /** The default poll protocol to use, unless otherwise overridden by the
   * Archival Unit's poll_protocol config param. */
  public static final String PARAM_POLL_PROTOCOL_VERSION =
    Configuration.PREFIX + "poll.defaultPollProtocol";
  private static final int DEFAULT_POLL_PROTOCOL_VERSION = Poll.V3_PROTOCOL;

  // The parser of the formatted date in the CU property 'Date'.
  // SimpleDateFormat is not thread-safe, so this member requires synchronized
  // access (see parseStringDate()).
  private static DateFormat CU_PROPERTY_DATE_PARSER =
    new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz");

  /** Return the daemon instance that the AU's plugin belongs to. */
  public static LockssDaemon getDaemon(ArchivalUnit au) {
    return au.getPlugin().getDaemon();
  }

  /**
   * Return the AuState object for the AU
   * @param au the AU
   * @return the AuState
   */
  public static AuState getAuState(ArchivalUnit au) {
    NodeManager nodeManager = getDaemon(au).getNodeManager(au);
    return nodeManager.getAuState();
  }

  /**
   * Return the AuSuspectUrlVersions object for the AU
   * @param au the AU
   * @return the AuSuspectUrlVersions
   */
  public static AuSuspectUrlVersions getSuspectUrlVersions(ArchivalUnit au) {
    LockssRepository repo = getDaemon(au).getLockssRepository(au);
    return repo.getSuspectUrlVersions(au);
  }

  /**
   * Update the stored record of suspect versions for the AU
   * @param au the AU
   * @param asuv the AuSuspectUrlVersions object to store
   */
  public static void saveSuspectUrlVersions(ArchivalUnit au,
					    AuSuspectUrlVersions asuv)
      throws SerializationException {
    LockssRepository repo = getDaemon(au).getLockssRepository(au);
    repo.storeSuspectUrlVersions(au, asuv);
  }

  /**
   * Return true if a record of suspect versions exists for the AU
   * @param au the AU
   * @return true if the repository has suspect URL versions for the AU
   */
  public static boolean hasSuspectUrlVersions(ArchivalUnit au) {
    LockssRepository repo = getDaemon(au).getLockssRepository(au);
    return repo.hasSuspectUrlVersions(au);
  }

  /** Return the top-level repository node for the AU; a malformed AU URL
   * is unexpected and surfaces as an unchecked exception. */
  public static AuNodeImpl getAuRepoNode(ArchivalUnit au) {
    LockssDaemon daemon = getDaemon(au);
    LockssRepository repo = daemon.getLockssRepository(au);
    try {
      return(AuNodeImpl)repo.getNode(au.getAuCachedUrlSet().getUrl());
    } catch (MalformedURLException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * @param au An ArchivalUnit
   * @param url A URL
   * @return The RepositoryNode representing the URL in the given AU.
   * @throws MalformedURLException if the URL cannot be parsed.
   */
  public static RepositoryNode getRepositoryNode(ArchivalUnit au, String url)
      throws MalformedURLException {
    LockssDaemon daemon = getDaemon(au);
    LockssRepository repo = daemon.getLockssRepository(au);
    return repo.getNode(url);
  }

  /**
   * Return the size of the AU, calculating it if necessary.
   * @param au the AU
   * @param calcIfUnknown if true, the size is computed if not already known
   * @return the AU's total content size.
   */
  public static long getAuContentSize(ArchivalUnit au,
				      boolean calcIfUnknown) {
    // NOTE(review): 'daemon' is unused here; looks like leftover code
    LockssDaemon daemon = getDaemon(au);
    RepositoryNode repoNode = getAuRepoNode(au);
    return repoNode.getTreeContentSize(null, calcIfUnknown);
  }

  /**
   * Return the disk space used by the AU, including all overhead,
   * calculating it if necessary.
   * @param au the AU
   * @param calcIfUnknown if true, disk usage will calculated if unknown
   * (time consumeing)
   * @return the AU's disk usage in bytes.
   */
  public static long getAuDiskUsage(ArchivalUnit au, boolean calcIfUnknown) {
    // NOTE(review): 'daemon' is unused here; looks like leftover code
    LockssDaemon daemon = getDaemon(au);
    AuNodeImpl repoNode = getAuRepoNode(au);
    return repoNode.getDiskUsage(calcIfUnknown);
  }

  /** Return a string appropriate to use as a thread name for the specified
   * process working on the au. */
  public static String getThreadNameFor(String procName, ArchivalUnit au) {
    StringBuffer sb = new StringBuffer();
    sb.append(procName);
    sb.append(": ");
    sb.append(StringUtil.toUnaccented(au.getName()));
    return sb.toString();
  }

  /** Return the user message configured for the AU, or null if none. */
  public static String getConfigUserMessage(ArchivalUnit au) {
    // XXX change this to not require string to be copied into each AU
    TypedEntryMap map = au.getProperties();
    String str = map.getString(DefinableArchivalUnit.KEY_AU_CONFIG_USER_MSG,
			       null);
    if (str == null) {
      return null;
    }
    return str;
  }

  /** Return true if the supplied AU config appears to be compatible with
   * the plugin.  Checks only that all required (definitional) parameters
   * have values. */
  public static boolean isConfigCompatibleWithPlugin(Configuration config,
						     Plugin plugin) {
    for (Iterator iter = plugin.getAuConfigDescrs().iterator();
	 iter.hasNext();) {
      ConfigParamDescr descr = (ConfigParamDescr)iter.next();
      String key = descr.getKey();
      String val = config.get(key);
      if (val == null) {
	// missing value is fatal only for definitional params
	if (descr.isDefinitional()) {
	  log.debug(descr + " is definitional, absent from " + config);
	  return false;
	}
      } else {
	if (!descr.isValidValueOfType(val)) {
	  log.debug(val + " is not a valid value of type " + descr);
	  return false;
	}
      }
    }
    return true;
  }

  /** Search titles belonging to <i>plugin</i> in the title DB for one that
   * matches the config.
   * @param config an AU config (unqualified)
   * @param plugin a plugin
   * @return the matching TitleConfig, or null if none found
   */
  // Unit test for this is in TestBaseArchivalUnit
  public static TitleConfig findTitleConfig(Configuration config,
					    Plugin plugin) {
    if (plugin.getSupportedTitles() == null) {
      return null;
    }
    for (Iterator iter = plugin.getSupportedTitles().iterator();
	 iter.hasNext(); ) {
      String title = (String)iter.next();
      TitleConfig tc = plugin.getTitleConfig(title);
      if (tc != null && tc.matchesConfig(config) && tc.isSingleAu(plugin)) {
	return tc;
      }
    }
    return null;
  }

  // XXX Giving clients access to the plugin definition map allows them to
  // modify it, should be replaced by something safer.
  private static TypedEntryMap EMPTY_DEF_MAP = new TypedEntryMap();

  /** Return the plugin definition map for the AU's plugin. */
  public static TypedEntryMap getPluginDefinition(ArchivalUnit au) {
    return getPluginDefinition(au.getPlugin());
  }

  /** Return the plugin definition map, or an empty map for plugins that
   * are not DefinablePlugins. */
  public static TypedEntryMap getPluginDefinition(Plugin plug) {
    if (plug instanceof DefinablePlugin) {
      return ((DefinablePlugin)plug).getDefinitionMap();
    } else {
      return EMPTY_DEF_MAP;
    }
  }

  /** Return true if the AU is marked closed in its config or title DB. */
  public static boolean isClosed(ArchivalUnit au) {
    return getBoolValue(getAuParamOrTitleDefault(au,
						 ConfigParamDescr.AU_CLOSED),
			false);
  }

  /**
   * Returns true if the AU has ever successfully completed a new content
   * crawl
   */
  public static boolean hasCrawled(ArchivalUnit au) {
    return getAuState(au).getLastCrawlTime() >= 0;
  }

  /** Return true unless the AU is an ExplodedArchivalUnit, whose extra
   * files must not be deleted. */
  public static boolean okDeleteExtraFiles(ArchivalUnit au) {
    return !(au instanceof ExplodedArchivalUnit);
  }

  /** Return the plugin's delete-extra-files setting, or dfault. */
  public static boolean isDeleteExtraFiles(ArchivalUnit au,
					   boolean dfault) {
    return getPluginDefinition(au)
      .getBoolean(DefinablePlugin.KEY_PLUGIN_DELETE_EXTRA_FILES, dfault);
  }

  /** Return the plugin's repair-from-publisher-when-too-close setting, or
   * dfault. */
  public static boolean isRepairFromPublisherWhenTooClose(ArchivalUnit au,
							  boolean dfault) {
    return getPluginDefinition(au)
      .getBoolean(DefinablePlugin.KEY_REPAIR_FROM_PUBLISHER_WHEN_TOO_CLOSE,
		  dfault);
  }

  /** Return true if the publisher is down (or permanently unavailable) for
   * the AU. */
  public static boolean isPubDown(ArchivalUnit au) {
    return isPubNever(au) ||
      getBoolValue(getAuParamOrTitleDefault(au, ConfigParamDescr.PUB_DOWN),
		   false);
  }

  /** Return true if the AU is marked pub_never. */
  public static boolean isPubNever(ArchivalUnit au) {
    return getBoolValue(getAuParamOrTitleDefault(au,
						 ConfigParamDescr.PUB_NEVER),
			false);
  }

  /** Return true if the publisher is down according to the title config. */
  public static boolean isPubDown(TitleConfig tc) {
    return isPubNever(tc) ||
      getBoolValue(getTitleDefault(tc, ConfigParamDescr.PUB_DOWN), false);
  }

  /** Return true if the title config is marked pub_never. */
  public static boolean isPubNever(TitleConfig tc) {
    return getBoolValue(getTitleDefault(tc, ConfigParamDescr.PUB_NEVER),
			false);
  }

  /** Return the plugin's Poll feature version, falling back to the plugin
   * version if no feature version is declared. */
  public static String getPollVersion(ArchivalUnit au) {
    Plugin plugin = au.getPlugin();
    String res = plugin.getFeatureVersion(Plugin.Feature.Poll);
    if (res == null) {
      res = plugin.getVersion();
    }
    return res;
  }

  /** Return the poll protocol version for the AU, from its config/title DB
   * or the global default. */
  public static int getProtocolVersion(ArchivalUnit au) {
    return getIntValue(getAuParamOrTitleDefault(au,
						ConfigParamDescr.PROTOCOL_VERSION),
		       CurrentConfig.getIntParam(PARAM_POLL_PROTOCOL_VERSION,
						 DEFAULT_POLL_PROTOCOL_VERSION));
  }

  /** Return true iff the AU's plugin's implementation version of the named
   * Feature is the same as that recorded in the AU's AuState the last time
   * the feature was invoked.  (I.e., if false, the plugin has changed
   * since the AU was processed, so the AU may need to be reprocessed */
  public static boolean isCurrentFeatureVersion(ArchivalUnit au,
						Plugin.Feature feat) {
    Plugin plugin = au.getPlugin();
    AuState aus = AuUtil.getAuState(au);
    return StringUtil.equalStrings(plugin.getFeatureVersion(feat),
				   aus.getFeatureVersion(feat));
  }

  /** Holder for a resolved proxy host/port and whether it came from an
   * AU-level override of the global proxy config. */
  public static class AuProxyInfo {
    String host = null;
    int port;
    boolean isAuOverride = false;

    public String getHost() {
      return host;
    }

    public int getPort() {
      return port;
    }

    public boolean isAuOverride() {
      return isAuOverride;
    }

    // NOTE(review): equals() is overridden without hashCode(); fine for the
    // comparison use in getAuProxyInfo() but unsafe as a hash key
    public boolean equals(Object o) {
      if (o instanceof AuProxyInfo) {
	AuProxyInfo other = (AuProxyInfo)o;
	return StringUtil.equalStringsIgnoreCase(host, other.host)
	  && port == other.port;
      }
      return false;
    }
  }

  /**
   * Returns the proxy info specified by au's proxy spec if it is
   * of the form "host:port" for the proxy host and port.  Otherwise,
   * return the proxy host and port from the global configuration.
   *
   * @param au the AU for the proxy spec from an AU of the form "host:port"
   * or <code>null</code> if no override proxy is specified.
   * @return the proxy info from either the override or the global config
   * proxy settings
   */
  public static AuProxyInfo getAuProxyInfo(ArchivalUnit au) {
    return getAuProxyInfo(au, ConfigManager.getCurrentConfig());
  }

  /**
   * Returns the proxy info specified by auProxySpec if it is
   * of the form "host:port" for the proxy host and port.  Otherwise,
   * return the proxy host and port from the global configuration.
   *
   * @param auProxySpec the proxy spec from an AU of the form "host:port"
   * or <code>null</code> if no override proxy is specified.
   * @return the proxy info from either the override or the global proxy
   * settings
   */
  public static AuProxyInfo getAuProxyInfo(String auProxySpec) {
    return getAuProxyInfo(auProxySpec, ConfigManager.getCurrentConfig());
  }

  /**
   * Returns the proxy info specified by au's proxy spec if it is
   * of the form "host:port" for the proxy host and port.  Otherwise,
   * return the proxy host and port from the specified configuration.
   *
   * @param au the AU for the proxy spec from an AU of the form "host:port"
   * or <code>null</code> if no override proxy is specified.
   * @param config the configuration specifying the global proxy host and port
   * @return the proxy info from either the override or the config proxy
   * settings
   */
  public static AuProxyInfo getAuProxyInfo(ArchivalUnit au,
					   Configuration config) {
    // In RegistryArchivalUnit, CRAWL_PROXY is set in au.getProperties(),
    // not in au.getConfiguration().  Unless/until
    // getAuParamOrTitleDefault() is changed, we need to look in
    // au.getProperties() ourselves
    String auProxySpec =
      getStringValue(getAuParamOrTitleDefault(au,
					      ConfigParamDescr.CRAWL_PROXY),
		     null);
    auProxySpec =
      au.getProperties().getString(ConfigParamDescr.CRAWL_PROXY.getKey(),
				   auProxySpec);
    return getAuProxyInfo(auProxySpec, config);
  }

  /**
   * Returns the proxy info specified by auProxySpec if it is
   * of the form "host:port" for the proxy host and port.  Otherwise,
   * return the proxy host and port from the specified configuration.
   *
   * @param auProxySpec the proxy spec from an AU of the form "host:port"
   * or <code>null</code> if no override proxy is specified.
   * @param config the configuration specifying the global proxy host and port
   * @return the proxy info from either the override or the config proxy
   * settings
   */
  public static AuProxyInfo getAuProxyInfo(String auProxySpec,
					   Configuration config) {
    AuProxyInfo global = new AuProxyInfo();
    if (config.getBoolean(BaseCrawler.PARAM_PROXY_ENABLED,
			  BaseCrawler.DEFAULT_PROXY_ENABLED)) {
      global.host = config.get(BaseCrawler.PARAM_PROXY_HOST);
      global.port = config.getInt(BaseCrawler.PARAM_PROXY_PORT,
				  BaseCrawler.DEFAULT_PROXY_PORT);
      // an incomplete global spec is treated as no proxy at all
      if (StringUtil.isNullString(global.host) || global.port <= 0) {
	global.host = null;
	global.port = 0;
      }
    }
    if (!StringUtil.isNullString(auProxySpec)) {
      AuProxyInfo res = new AuProxyInfo();
      try {
	HostPortParser hpp = new HostPortParser(auProxySpec);
	res.host = hpp.getHost();
	if (res.host != null) {
	  res.port = hpp.getPort();
	}
      } catch (HostPortParser.InvalidSpec e) {
	// an unparseable AU override falls back to the global settings
	log.warning("Illegal AU crawl_proxy: " + auProxySpec, e);
	return global;
      }
      res.isAuOverride = !res.equals(global);
      return res;
    }
    return global;
  }

  /** Return an attribute value from the AU's title DB entry, if any */
  public static String getTitleAttribute(ArchivalUnit au, String key) {
    TitleConfig tc = au.getTitleConfig();
    if (tc != null) {
      Map attrs = tc.getAttributes();
      if (attrs != null) {
	return (String)attrs.get(key);
      }
    }
    return null;
  }

  /** Return an attribute value from the AU's title DB entry, if any */
  public static String getTitleAttribute(ArchivalUnit au, String key,
					 String dfault) {
    String res = getTitleAttribute(au, key);
    return (res != null) ? res : dfault;
  }

  /** Return the substance crawl-test threshold for the AU (title attribute
   * takes precedence over the AU config), or -1 if none is configured or
   * the configured value is not a number. */
  public static int getSubstanceTestThreshold(ArchivalUnit au) {
    String key = ConfigParamDescr.CRAWL_TEST_SUBSTANCE_THRESHOLD.getKey();
    String thresh = getTitleAttribute(au, key);
    if (!StringUtil.isNullString(thresh)) {
      try {
	return Integer.parseInt(thresh);
      } catch (NumberFormatException e) {
	log.error("Illegal crawl test threshold: " + thresh
		  + ", performing regular crawl");
      }
    }
    Configuration auConfig = au.getConfiguration();
    if (auConfig.containsKey(key)) {
      try {
	return auConfig.getInt(key);
      } catch (Configuration.InvalidParam e) {
	log.error("Illegal crawl test threshold: " + auConfig.get(key)
		  + ", performing regular crawl");
      }
    }
    return -1;
  }

  /** Return the value as a boolean if it is one, else dfault. */
  public static boolean getBoolValue(Object value, boolean dfault) {
    if (value instanceof Boolean) {
      return ((Boolean)value).booleanValue();
    }
    return dfault;
  }

  /** Return the value as an int if it is one, else dfault. */
  public static int getIntValue(Object value, int dfault) {
    if (value instanceof Integer) {
      return ((Integer)value).intValue();
    }
    return dfault;
  }

  /** Return the value as a String if it is one, else dfault. */
  public static String getStringValue(Object value, String dfault) {
    if (value instanceof String) {
      return (String)value;
    }
    return dfault;
  }

  /** Return the value of a config param either from the AU config or from
   * a default value in its TitleConfig */
  // This should probably look in au.getProperties() instead of
  // au.getConfiguration() so it sees inferred params.  That's a little
  // harder because it's typed.
  public static Object getAuParamOrTitleDefault(ArchivalUnit au,
						ConfigParamDescr cpd) {
    String key = cpd.getKey();
    String val = null;
    Configuration auConfig = au.getConfiguration();
    if (auConfig != null) {
      val = auConfig.get(key);
      if (!StringUtil.isNullString(val)) {
	return getValueOfType(val, cpd);
      }
    }
    TitleConfig tc = au.getTitleConfig();
    if (tc != null) {
      return getTitleDefault(tc, cpd);
    }
    return null;
  }

  /** Return the typed value of the string per the descriptor, or null if
   * the string cannot be converted. */
  public static Object getValueOfType(String valstr, ConfigParamDescr cpd) {
    if (valstr == null) {
      return null;
    }
    try {
      return cpd.getValueOfType(valstr);
    } catch (ConfigParamDescr.InvalidFormatException e) {
      return null;
    }
  }

  /** Return the title config's default value for the param, if any. */
  public static Object getTitleDefault(TitleConfig tc, ConfigParamDescr cpd) {
    ConfigParamAssignment cpa = tc.findCpa(cpd);
    if (cpa != null) {
      return getValueOfType(cpa.getValue(), cpd);
    }
    return null;
  }

  /** Call release() on the CachedUrl, ignoring any errors */
  public static void safeRelease(CachedUrl cu) {
    try {
      cu.release();
    } catch (Exception e) {}
  }

  /** Return the CachedUrl for a content node, or null if not a content
   * node */
  public static CachedUrl getCu(CachedUrlSetNode node) {
    switch (node.getType()) {
    case CachedUrlSetNode.TYPE_CACHED_URL_SET:
      CachedUrlSet cus = (CachedUrlSet)node;
      return cus.getArchivalUnit().makeCachedUrl(cus.getUrl());
    case CachedUrlSetNode.TYPE_CACHED_URL:
      return (CachedUrl)node;
    }
    return null;
  }

  /** Return the charset specified in the UC's response headers, or the
   * default charset.  Never returns null. */
  public static String getCharsetOrDefault(UrlCacher uc) {
    CIProperties props = uc.getUncachedProperties();
    if (props == null) {
      return Constants.DEFAULT_ENCODING;
    }
    String ctype = props.getProperty(CachedUrl.PROPERTY_CONTENT_TYPE);
    return HeaderUtil.getCharsetOrDefaultFromContentType(ctype);
  }

  /** Return true iff the AU specifies archive file types whose memebers
   * should be accessible as CachedUrls */
  public static boolean hasArchiveFileTypes(ArchivalUnit au) {
    return au.getArchiveFileTypes() != null;
  }

  /**
   * Provides the creation time of an Archival Unit.
   *
   * @param au
   *          An ArchivalUnit with the Archival Unit.
   * @return a long with the creation time of the Archival Unit.
   */
  public static long getAuCreationTime(ArchivalUnit au) {
    final String DEBUG_HEADER = "getAuCreationTime(): ";
    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "au = " + au);

    // Check whether the Archival Unit does not exist.
    if (au == null) {
      // Yes: Report the problem.
      throw new NullPointerException("Archival Unit is null");
    }

    long auCreationTime = 0;

    // Get the Archival Unit state.
    AuState auState = AuUtil.getAuState(au);

    // Check that the Archival Unit state exists.
    if (auState != null) {
      // Yes: Get the Archival Unit creation time.
      auCreationTime = auState.getAuCreationTime();
    } else {
      // No: Report the problem.
      throw new IllegalArgumentException("Archival Unit state is null");
    }

    if (log.isDebug2())
      log.debug2(DEBUG_HEADER + "auCreationTime = " + auCreationTime);
    return auCreationTime;
  }

  /**
   * Provides the earliest fetch time of a collection of URLs of an Archival
   * Unit.
   *
   * @param au
   *          An ArchivalUnit with the Archival Unit.
   * @param urls
   *          A Collection<String> with the URLs.
   * @return a long with the the earliest fetch time of any of the passed URLs.
   */
  public static long getAuUrlsEarliestFetchTime(ArchivalUnit au,
      Collection<String> urls) {
    final String DEBUG_HEADER = "getAuUrlsEarliestFetchTime(): ";

    // Check whether the Archival Unit does not exist.
    if (au == null) {
      // Yes: Report the problem.
      throw new NullPointerException("Archival Unit is null");
    }

    // Check whether the Archival Unit does not exist.
    if (urls == null) {
      // Yes: Report the problem.
      throw new NullPointerException("No URLs");
    }

    long fetchTime = 0L;

    // Loop through all the URLs.
    for (String url : urls) {
      if (log.isDebug3()) log.debug3(DEBUG_HEADER + "url = " + url);

      // Get the fetch time of this URL.
      long newFetchTime = getUrlFetchTime(au, url);
      if (log.isDebug3())
	log.debug3(DEBUG_HEADER + "newFetchTime = " + newFetchTime);

      // Check whether it is earlier than any of the previously found fetch
      // times.
      if (newFetchTime > 0 && (fetchTime == 0 || newFetchTime < fetchTime)) {
	// Yes: Remember it.
	fetchTime = newFetchTime;
      }
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "fetchTime = " + fetchTime);
    return fetchTime;
  }

  /**
   * Provides the fetch time of a URL.
   *
   * @param au
   *          An ArchivalUnit with the Archival Unit.
   * @param url
   *          A String with the URL.
   * @return a long with the fetch time of the URL, or 0 if it cannot be
   *         determined.
   */
  public static long getUrlFetchTime(ArchivalUnit au, String url) {
    final String DEBUG_HEADER = "getUrlFetchTime(): ";
    if (log.isDebug2()) {
      log.debug2(DEBUG_HEADER + "au = " + au);
      log.debug2(DEBUG_HEADER + "url = " + url);
    }

    CachedUrl cachedUrl = null;
    long fetchTime = 0;

    // Get the cached URL.
    try {
      cachedUrl = au.makeCachedUrl(url);
      if (log.isDebug3()) log.debug3(DEBUG_HEADER + "cachedUrl = " + cachedUrl);

      // Get the first version of the cached URL.
      cachedUrl = cachedUrl.getCuVersion(1);
      if (log.isDebug3()) log.debug3(DEBUG_HEADER + "cachedUrl = " + cachedUrl);

      // Get its properties.
      CIProperties cuProperties = cachedUrl.getProperties();
      if (log.isDebug3())
	log.debug3(DEBUG_HEADER + "cuProperties = " + cuProperties);

      // Try to get the best fetch time.
      String origFetchTimeAsString =
	  cuProperties.getProperty(CuResourceHandler.ORIG_HEADER_PREFIX
	      + CachedUrl.PROPERTY_FETCH_TIME);
      if (log.isDebug3()) log.debug3(DEBUG_HEADER + "origFetchTimeAsString = "
	  + origFetchTimeAsString);

      // Check whether a fetch time was obtained.
      if (origFetchTimeAsString != null) {
	try {
	  // Yes: Try to parse it as a number.
	  fetchTime = Long.parseLong(origFetchTimeAsString);
	  if (log.isDebug3())
	    log.debug3(DEBUG_HEADER + "fetchTime = " + fetchTime);
	} catch (NumberFormatException nfe) {
	  if (log.isDebug3())
	    log.debug3(DEBUG_HEADER + "origFetchTimeAsString is not a number");
	}
      }

      // Check whether a fetch time was not obtained.
      if (fetchTime == 0) {
	// Yes: Try to use the fetch time property.
	String fetchTimeAsString =
	    cuProperties.getProperty(CachedUrl.PROPERTY_FETCH_TIME);
	if (log.isDebug3())
	  log.debug3(DEBUG_HEADER + "fetchTimeAsString = " + fetchTimeAsString);

	// Check whether a fetch time was obtained.
	if (fetchTimeAsString != null) {
	  try {
	    // Yes: Try to parse it as a number.
	    fetchTime = Long.parseLong(fetchTimeAsString);
	    if (log.isDebug3())
	      log.debug3(DEBUG_HEADER + "fetchTime = " + fetchTime);
	  } catch (NumberFormatException nfe) {
	    if (log.isDebug3())
	      log.debug3(DEBUG_HEADER + "fetchTimeAsString is not a number");
	  }
	}

	// Try to use the 'Date' property.
	String dateAsString = cuProperties.getProperty("Date");
	if (log.isDebug3())
	  log.debug3(DEBUG_HEADER + "dateAsString = " + dateAsString);

	// Check whether a fetch time was obtained.
	if (dateAsString != null) {
	  try {
	    // Yes: Try to parse it as a number.
	    long date = parseStringDate(dateAsString).getTime();
	    if (log.isDebug3()) log.debug3(DEBUG_HEADER + "date = " + date);

	    // Use it if it's the only time we have or it's lower than the other
	    // one.
	    if (fetchTime == 0 || date < fetchTime) {
	      fetchTime = date;
	      if (log.isDebug3())
		log.debug3(DEBUG_HEADER + "fetchTime = " + fetchTime);
	    }
	  } catch (ParseException nfe) {
	    if (log.isDebug3())
	      log.debug3(DEBUG_HEADER + "dateAsString is not a number");
	  }
	}
      }
    } catch (Exception e) {
      // any failure while reading the CU is logged and reported as the best
      // fetch time found so far (possibly 0)
      log.info("Exception caught ", e);
      if (log.isDebug2())
	log.debug2(DEBUG_HEADER + "fetchTime = " + fetchTime);
      return fetchTime;
    } finally {
      // Release the cached URL.
      safeRelease(cachedUrl);
    }

    if (log.isDebug2()) log.debug2(DEBUG_HEADER + "fetchTime = " + fetchTime);
    return fetchTime;
  }

  /**
   * Parses a text date in a thread-safe way.
   *
   * @param dateAsString
   *          A String with the date to be parsed.
   * @return a Date with the parsed date.
   * @throws ParseException
   *           if there are problems parsing the date.
   */
  private static Date parseStringDate(String dateAsString)
      throws ParseException {
    synchronized (CU_PROPERTY_DATE_PARSER) {
      return CU_PROPERTY_DATE_PARSER.parse(dateAsString);
    }
  }
}
package com.dafrito.lua.script; import static org.junit.Assert.assertEquals; import lua.LuaLibrary; import lua.LuaLibrary.lua_State; import org.junit.Before; import org.junit.Test; public class LuaTableTest { LuaLibrary lua = LuaLibrary.INSTANCE; private LuaBindings b; private LuaScriptContext ctx; @Test public void getAndPutAValueIntoATable() throws Exception { lua.lua_createtable(b.getState(), 0, 0); LuaTable t = new LuaTable(new LuaReference(b)); t.set(1, "No time"); assertEquals("No time", t.get(1)); } @Before public void setup() { ctx = new LuaScriptContext(); b = ctx.getGlobals(); } class LuaTable { private LuaBindings b; private LuaReference ref; private lua_State s; public LuaTable(LuaReference ref) { this.b = ref.getBindings(); this.ref = ref; this.s = b.getState(); } public Object get(Object k) { ref.get(); b.toLua(k); lua.lua_gettable(s, -2); Object v = b.fromLua(-1); lua.lua_settop(s, -2); return v; } public void set(Object k, Object v) { ref.get(); b.toLua(k); b.toLua(v); lua.lua_settable(s, -3); lua.lua_settop(s, -2); } } class LuaReference { private final LuaBindings b; private int ref; public LuaReference(LuaBindings b) { this.b = b; this.ref = lua.luaL_ref(b.getState(), LuaLibrary.LUA_REGISTRYINDEX); check(); } public LuaBindings getBindings() { return this.b; } public void get() { check(); lua.lua_rawgeti(b.getState(), LuaLibrary.LUA_REGISTRYINDEX, this.ref); } private void check() { if (this.isClosed()) { throw new RuntimeException(); } } public boolean isClosed() { return this.ref == LuaLibrary.LUA_REFNIL; } public void close() { if (this.isClosed()) { return; } lua.luaL_unref(b.getState(), LuaLibrary.LUA_REGISTRYINDEX, this.ref); this.ref = LuaLibrary.LUA_REFNIL; } } }
package rzehan.gui.sample;

import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.scene.control.Label;
import rzehan.shared.Os;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import java.util.logging.Logger;

/**
 * JavaFX controller that probes external imaging tools (jpylyzer, JHOVE,
 * ImageMagick, Kakadu) by launching them as command-line processes and
 * publishing a truncated view of their output in the bound labels.
 */
public class Controller {

    private static final Logger LOGGER = Logger.getLogger(Controller.class.getSimpleName());

    /** Maximum number of characters of tool output rendered into a label. */
    private static final int MAX_OUTPUT_LENGTH = 100;

    /** Hard-coded sample JP2 master copy fed to the "run" actions. */
    private static final String MC_FILE = "/home/martin/zakazky/NKP-validator/data/monografie_1.2/b50eb6b0-f0a4-11e3-b72e-005056827e52/mastercopy/mc_b50eb6b0-f0a4-11e3-b72e-005056827e52_0001.jp2";

    @FXML Label osLabel;
    @FXML Label logLabel;
    @FXML Label detectJpylyzerVersionLabel;
    @FXML Label runJpylyzerLabel;
    @FXML Label detectJhoveVersionLabel;
    @FXML Label runJhoveLabel;
    @FXML Label detectImageMagickVersionLabel;
    @FXML Label installImageMagickLabel;
    @FXML Label runImageMagickLabel;
    @FXML Label detectKakaduVersionLabel;
    @FXML Label runKakaduLabel;

    private final Os os;

    public Controller() {
        os = Os.detectOs();
        LOGGER.info("os: " + os.toString());
    }

    /** Called by the FXML loader once the @FXML fields are injected. */
    public void initialize() {
        osLabel.setText(os.toReadableString());
    }

    /**
     * Runs {@code command} as an external process and captures its exit code,
     * standard output and standard error.
     *
     * @param command the command line to execute
     * @return exit value plus full stdout/stderr text
     * @throws IOException          if the process cannot be started or read
     * @throws InterruptedException if interrupted while waiting for it
     */
    private CmlCommandResult executeCliCommand(String command)
            throws IOException, InterruptedException {
        // NOTE(review): Runtime.exec(String) tokenizes the command on
        // whitespace; arguments containing spaces (e.g. file paths) would
        // need ProcessBuilder(List<String>) instead.
        final Process pr = Runtime.getRuntime().exec(command);

        // Drain stderr on its own thread.  The previous code read stderr to
        // EOF before touching stdout, which deadlocks when the child fills
        // the stdout pipe buffer while we block on stderr.
        final StringBuilder stderrBuilder = new StringBuilder();
        final IOException[] stderrError = new IOException[1];
        Thread stderrDrainer = new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    readAll(pr.getErrorStream(), stderrBuilder);
                } catch (IOException e) {
                    stderrError[0] = e;
                }
            }
        });
        stderrDrainer.start();

        StringBuilder stdoutBuilder = new StringBuilder();
        readAll(pr.getInputStream(), stdoutBuilder);

        // join() establishes happens-before, so reading stderrBuilder and
        // stderrError afterwards is safe.
        stderrDrainer.join();
        if (stderrError[0] != null) {
            throw stderrError[0];
        }
        int exitValue = pr.waitFor();
        return new CmlCommandResult(exitValue, stdoutBuilder.toString(), stderrBuilder.toString());
    }

    /**
     * Appends every line of {@code in} to {@code sink}, '\n'-terminated,
     * then closes the stream.
     */
    private static void readAll(InputStream in, StringBuilder sink) throws IOException {
        BufferedReader reader = new BufferedReader(new InputStreamReader(in));
        try {
            String line;
            while ((line = reader.readLine()) != null) {
                sink.append(line).append('\n');
            }
        } finally {
            reader.close();
        }
    }

    /**
     * Dumps all system properties, sorted by name, into the log label.
     * NOTE(review): this handler also triggers installImageMagick(), which
     * looks accidental but is kept to preserve existing behaviour — confirm.
     */
    public void logProperties(ActionEvent actionEvent) {
        StringBuilder builder = new StringBuilder();
        Properties properties = System.getProperties();
        List<String> propertyNames = new ArrayList<String>(properties.stringPropertyNames());
        Collections.sort(propertyNames);
        for (String propertyName : propertyNames) {
            String line = String.format("%s: %s", propertyName, properties.getProperty(propertyName));
            builder.append(line).append("\n");
        }
        logLabel.setText(builder.toString());
        installImageMagick(actionEvent);
    }

    /** Detects an installed jpylyzer by asking it for its version. */
    public void detectJpylyzerVersion(ActionEvent actionEvent) {
        detectJpylyzerVersionLabel.setText("checking jplyzer ...");
        try {
            CmlCommandResult output = executeCliCommand("jpylyzer --version");
            output.print();
            // jpylyzer prints its version string (e.g. "1.17.0") as the only
            // line on stderr
            detectJpylyzerVersionLabel.setText(output.getStderr());
        } catch (IOException e) {
            // the executable is most likely not installed / not on PATH
            detectJpylyzerVersionLabel.setText("not found");
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore interrupt status
            detectJpylyzerVersionLabel.setText("process interrupted");
        }
    }

    /** Runs jpylyzer on the sample JP2 file and shows a truncated report. */
    public void runJpylyzer(ActionEvent actionEvent) {
        String imageFile = MC_FILE;
        runJpylyzerLabel.setText("running jplyzer ...");
        try {
            // NOTE(review): "jp2In" looks like a leftover placeholder — the
            // tool reports "jp2In does not exist" on stderr; confirm the
            // intended argument list.
            CmlCommandResult output = executeCliCommand("jpylyzer jp2In " + imageFile);
            output.print();
            String outStr = output.getStdout().replace("\n", "");
            int length = Math.min(outStr.length(), MAX_OUTPUT_LENGTH);
            runJpylyzerLabel.setText(outStr.substring(0, length) + " ...");
        } catch (IOException e) {
            runJpylyzerLabel.setText("not found");
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            runJpylyzerLabel.setText("process interrupted");
        }
    }

    /** Detects an installed JHOVE by asking it for its version. */
    public void detectJhoveVersion(ActionEvent actionEvent) {
        detectJhoveVersionLabel.setText("checking jhove ...");
        try {
            CmlCommandResult output = executeCliCommand("jhove --version");
            output.print();
            // first stdout line looks like "Jhove (Rel. 1.6, 2011-01-04)"
            String firstLine = output.getStdout().split("\n")[0];
            detectJhoveVersionLabel.setText(firstLine);
        } catch (IOException e) {
            detectJhoveVersionLabel.setText("not found");
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            detectJhoveVersionLabel.setText("process interrupted");
        }
    }

    /** Runs JHOVE's JPEG2000 module on the sample file. */
    public void runJhove(ActionEvent actionEvent) {
        String imageFile = MC_FILE;
        runJhoveLabel.setText("running jhove ...");
        try {
            CmlCommandResult output = executeCliCommand("jhove -h XML -m jpeg2000-hul -k " + imageFile);
            output.print();
            String outStr = output.getStdout().replace("\n", "");
            int length = Math.min(outStr.length(), 150);
            runJhoveLabel.setText(outStr.substring(0, length) + " ...");
        } catch (IOException e) {
            runJhoveLabel.setText("not found");
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            runJhoveLabel.setText("process interrupted");
        }
    }

    /** Detects an installed ImageMagick via "convert -version". */
    public void detectImageMagickVersion(ActionEvent actionEvent) {
        detectImageMagickVersionLabel.setText("checking imageMagick ...");
        try {
            CmlCommandResult output = executeCliCommand("convert -version");
            output.print();
            String firstLine = output.getStdout().split("\n")[0];
            System.out.println(firstLine);
            detectImageMagickVersionLabel.setText(firstLine);
        } catch (IOException e) {
            detectImageMagickVersionLabel.setText("not found");
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            detectImageMagickVersionLabel.setText("process interrupted");
        }
    }

    /**
     * Launches the bundled ImageMagick installer for the detected OS and
     * shows the installer's (truncated) stdout.
     */
    public void installImageMagick(ActionEvent actionEvent) {
        try {
            String dir = System.getProperty("user.dir");
            String file = null;
            switch (os.getOsType()) {
                case WINDOWS:
                    file = dir + "\\resources\\bin\\ImageMagick-7.0.2-4-Q16-x64-dll.exe";
                    break;
                case LINUX:
                    file = dir + "/resources/bin/ImageMagick.deb";
                    break;
            }
            if (file == null) {
                // Previously fell through to exec(null), throwing a
                // NullPointerException on any other OS type.
                installImageMagickLabel.setText("unsupported OS");
                return;
            }
            CmlCommandResult output = executeCliCommand(file);
            output.print();
            String outStr = output.getStdout();
            int length = Math.min(outStr.length(), MAX_OUTPUT_LENGTH);
            installImageMagickLabel.setText(outStr.substring(0, length) + " ...");
        } catch (IOException e) {
            // installer binary missing or not executable; surface the message
            String output = e.getMessage();
            int length = Math.min(output.length(), MAX_OUTPUT_LENGTH);
            installImageMagickLabel.setText(output.substring(0, length) + " ...");
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            installImageMagickLabel.setText("process interrupted");
        }
    }

    /** Runs "identify -verbose" on the sample file. */
    public void runImageMagick(ActionEvent actionEvent) {
        String imageFile = MC_FILE;
        runImageMagickLabel.setText("running imageMagick ...");
        try {
            CmlCommandResult output = executeCliCommand("identify -verbose " + imageFile);
            output.print();
            String outStr = output.getStdout();
            // TODO: collect sample files that trigger identify errors
            System.out.println(outStr);
            outStr = outStr.replace("\n", "");
            int length = Math.min(outStr.length(), 150);
            runImageMagickLabel.setText(outStr.substring(0, length) + " ...");
        } catch (IOException e) {
            runImageMagickLabel.setText("not found");
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            runImageMagickLabel.setText("process interrupted");
        }
    }

    /** Detects Kakadu availability (its version cannot be queried cleanly). */
    public void detectKakaduVersion(ActionEvent actionEvent) {
        detectKakaduVersionLabel.setText("checking kakadu ...");
        try {
            // TODO: proper version detection; kdu_expand has no version switch
            CmlCommandResult output = executeCliCommand("kdu_expand");
            output.print();
            detectKakaduVersionLabel.setText("Kakadu available, cannot determine version");
        } catch (IOException e) {
            detectKakaduVersionLabel.setText("not available");
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            detectKakaduVersionLabel.setText("process interrupted");
        }
    }

    /** Runs kdu_expand on the sample file. */
    public void runKakadu(ActionEvent actionEvent) {
        String imageFile = MC_FILE;
        runKakaduLabel.setText("running kakadu ...");
        try {
            // TODO: kdu_expand is frequently not on PATH
            CmlCommandResult output = executeCliCommand("kdu_expand -i " + imageFile);
            output.print();
            String outStr = output.getStdout().replace("\n", "");
            // TODO: collect sample files that trigger kdu_expand errors
            System.out.println(outStr);
            int length = Math.min(outStr.length(), 150);
            runKakaduLabel.setText(outStr.substring(0, length) + " ...");
        } catch (IOException e) {
            runKakaduLabel.setText("not found");
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            runKakaduLabel.setText("process interrupted");
        }
    }
}
package org.lockss.util;

import java.io.*;
import java.util.Arrays;

import org.apache.oro.text.regex.*;

/** Utilities for Files */
public class FileUtil {
  static final Logger log = Logger.getLogger("FileUtil");

  /** Buffer size used when comparing file contents. */
  static final int FILE_CHUNK_SIZE = 1024;

  /**
   * Converts the file path given into a system-dependent form.
   * For example, 'var/foo/bar' becomes 'var\foo\bar' on a Windows machine
   * and vice versa.
   * @param filePath the path
   * @return the new path
   */
  public static String sysDepPath(String filePath) {
    if (File.separatorChar == '/') {
      return filePath.replace('\\', File.separatorChar);
    } else {
      return filePath.replace('/', File.separatorChar);
    }
  }

  /**
   * Converts the file path given into a system-independent form, utilizing
   * only '/' as a separator.
   * @param filePath the path
   * @return the new path
   */
  public static String sysIndepPath(String filePath) {
    if (File.separatorChar == '/') {
      return filePath.replace('\\', '/');
    } else {
      return filePath.replace(File.separatorChar, '/');
    }
  }

  /**
   * Checks that a '/'-separated path never climbs above its starting
   * directory via "..".  Empty components ("//") and "." components leave
   * the depth unchanged; ".." reduces it.
   * @param path the path to check
   * @return false iff the path's ".." components would escape its root
   */
  public static boolean isLegalPath(String path) {
    int len = path.length();
    int depth = 0;
    int index = -1;                 // Points to char before start of next
                                    // path component. (Normally a slash)
    while (index < len - 2) {
      depth++;                      // assume it's a real path component
      // index+1 points at start of path component. Check first char
      switch (path.charAt(index + 1)) {
      case '/':
        depth--;                    // empty path component ("//") doesn't
        break;                      // count. (Equivalent to single slash)
      case '.':
        // component starts with "."
        switch (path.charAt(index + 2)) {
        case '/':
          depth--;                  // './' doesn't count
          break;
        case '.':
          // component starts with '..'; is next char '/' or end of string?
          if (((index + 3) == len) || (path.charAt(index + 3) == '/')) {
            depth -= 2;             // '../' doesn't count, and reduces depth
          }
          break;
        }
        break;
      }
      // if depth is negative, path has too many '..'s
      if (depth < 0) {
        return false;
      }
      index = path.indexOf("/", index + 1);
      if (index < 0) break;
    }
    return true;
  }

  /**
   * Compares the content of two files and returns true if they are the same.
   * If either file is null or a directory, returns false.
   * @param file1 the first file
   * @param file2 the second file
   * @return true iff content is identical
   * @throws IOException on read errors
   */
  public static boolean isContentEqual(File file1, File file2)
      throws IOException {
    if ((file1 == null) || (file2 == null)) {
      // null is never equal
      return false;
    }
    if ((file1.isDirectory()) || (file2.isDirectory())) {
      // don't compare directories
      return false;
    }
    if (file1.length() != file2.length()) {
      // easy length check
      return false;
    }
    // compare both streams
    FileInputStream fis1 = null;
    FileInputStream fis2 = null;
    try {
      fis1 = new FileInputStream(file1);
      fis2 = new FileInputStream(file2);
      byte[] bytes1 = new byte[FILE_CHUNK_SIZE];
      byte[] bytes2 = new byte[FILE_CHUNK_SIZE];
      while (true) {
        int bytesRead1 = fis1.read(bytes1);
        int bytesRead2 = fis2.read(bytes2);
        if (bytesRead1 != bytesRead2) {
          // shouldn't really happen, since lengths are equal
          return false;
        } else if (bytesRead1 == -1) {
          // EOF reached, exit
          break;
        }
        if (!Arrays.equals(bytes1, bytes2)) {
          return false;
        }
      }
      return true;
    } catch (FileNotFoundException fnfe) {
      // if the file is absent, no comparison
      return false;
    } finally {
      // close both streams even if the first close() throws
      // (previously a throw from fis1.close() leaked fis2)
      try {
        if (fis1 != null) {
          fis1.close();
        }
      } finally {
        if (fis2 != null) {
          fis2.close();
        }
      }
    }
  }

  /**
   * Probes whether a file whose name contains {@code testChar} can be
   * created in {@code location}.
   * NOTE(review): the probe file ("test"+c+"test") is left on disk —
   * presumably intentional, but confirm whether it should be deleted.
   */
  public static boolean canWriteToFileWithChar(String location, char testChar) {
    File file = new File(location, "test" + testChar + "test");
    try {
      FileOutputStream fos = new FileOutputStream(file);
      fos.close();
      return true;
    } catch (IOException ioe) {
      return false;
    }
  }

  // Matches the errno message signalling file-descriptor exhaustion.
  // (The literal was split across a line break; restored to one string.)
  static Pattern resourceErrorPat =
    RegexpUtil.uncheckedCompile("Too many open files",
                                Perl5Compiler.READ_ONLY_MASK);

  /** Return true if the exception was caused by a temporary resource
   * problem (e.g., running out of file descriptors), not a problem with
   * the file itself */
  public static boolean isTemporaryResourceException(IOException ex) {
    if (!(ex instanceof FileNotFoundException)) {
      return false;
    }
    return RegexpUtil.getMatcher().contains(ex.getMessage(), resourceErrorPat);
  }

  // Support for creating temporary files and directories

  private static int tmpFileCnt = -1;                       // -1 = unseeded
  private static final Object tmpFileLock = new Object();   // tmpFileCnt lock

  /**
   * Creates a temp file in {@code dir}, defaulting to the platform temp
   * directory when dir is null.
   */
  public static File createTempFile(String prefix, String suffix, File dir)
      throws IOException {
    if (dir == null) {
      dir = new File(PlatformUtil.getSystemTempDir());
    }
    return File.createTempFile(prefix, suffix, dir);
  }

  /** Creates a temp file in the platform temp directory. */
  public static File createTempFile(String prefix, String suffix)
      throws IOException {
    return createTempFile(prefix, suffix, null);
  }

  /** Create an empty directory.  Details are the same as
   * File.createTempFile(), but the File object returned is a directory.
   * @param prefix dir name prefix
   * @param suffix dir name suffix
   * @param directory the directory under which to create the new dir
   * @return The newly created directory
   * @throws IOException if no unique name could be claimed
   */
  public static File createTempDir(String prefix, String suffix,
                                   File directory) throws IOException {
    if (prefix == null) throw new NullPointerException();
    if (prefix.length() < 3)
      throw new IllegalArgumentException("Prefix string too short");
    String s = (suffix == null) ? ".tmp" : suffix;
    if (directory == null) {
      directory = new File(PlatformUtil.getSystemTempDir());
    }
    synchronized (tmpFileLock) {
      File f = null;
      // mkdir() is atomic, so the first successful call claims the name
      for (int ix = 0; ix < 1000; ix++) {
        f = generateFile(prefix, s, directory);
        if (f.mkdir()) {
          return f;
        }
      }
      throw new IOException("Couldn't create temp dir " + f.getPath());
    }
  }

  /** Create an empty directory in the default temporary-file directory.
   * Details are the same as File.createTempFile(), but the File object
   * returned is a directory.
   * @return The newly created directory
   */
  public static File createTempDir(String prefix, String suffix)
      throws IOException {
    return createTempDir(prefix, suffix, null);
  }

  /** Ensure the directory exists, creating it and any parents if
   * necessary.  mkdirs() has been observed to fail intermittently on some
   * platforms, so try a few times if it fails.
   * @param dir the directory
   * @return true if the directory already exists, if it was successfully
   * created, or if it came into being while we were trying to create it.
   */
  public static boolean ensureDirExists(File dir) {
    if (dir.exists()) {
      return true;
    }
    // fixed: the loop header was missing its decrement ("cnt--) {")
    for (int cnt = 3; cnt > 0; cnt--) {
      if (dir.mkdirs()) {
        return true;
      }
      if (dir.exists()) {
        return true;
      }
      log.error("Failed to mkdirs(" + dir + "), retrying");
      try {
        Deadline.in(100).sleep();
      } catch (InterruptedException e) {
      }
    }
    // If another thread is trying to create the same dir, it might have
    // succeeded, causing our call to mkdirs to return false, so check again
    // to see if it's there.  (I believe this happened creating the v3state
    // dir)
    return dir.exists();
  }

  /** Delete the contents of a directory, leaving the empty directory.
   * @return true iff successful */
  public static boolean emptyDir(File dir) {
    String files[] = dir.list();
    if (files == null) {
      return false;                 // true would imply there's an empty
                                    // dir, which there doesn't seem to be
    }
    boolean ret = true;
    for (int i = 0; i < files.length; i++) {
      File f = new File(dir, files[i]);
      if (f.isDirectory()) {
        ret = ret && emptyDir(f);
      }
      if (!f.delete()) {
        ret = false;
      }
    }
    return ret;
  }

  /** Delete a directory and its contents.
   * @return true iff successful */
  public static boolean delTree(File dir) {
    emptyDir(dir);
    if (dir.delete()) {
      return true;
    } else {
      // delete() may fail because the dir was already gone
      return !dir.exists();
    }
  }

  /** Generates the next candidate temp-file name.  Callers in this class
   * always hold tmpFileLock while calling; do the same from new call sites. */
  private static File generateFile(String prefix, String suffix, File dir)
      throws IOException {
    if (tmpFileCnt == -1) {
      // lazily seed the counter with a random 16-bit value
      tmpFileCnt = new LockssRandom().nextInt() & 0xffff;
    }
    tmpFileCnt++;
    return new File(dir, prefix + Integer.toString(tmpFileCnt) + suffix);
  }
}
package joliex.meta; import java.io.IOException; import java.net.URI; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import jolie.CommandLineException; import jolie.CommandLineParser; import jolie.Interpreter; import jolie.lang.NativeType; import jolie.lang.parse.ParserException; import jolie.lang.parse.SemanticException; import jolie.lang.parse.ast.EmbeddedServiceNode; import jolie.lang.parse.ast.InputPortInfo; import jolie.lang.parse.ast.InterfaceDefinition; import jolie.lang.parse.ast.InterfaceExtenderDefinition; import jolie.lang.parse.ast.OneWayOperationDeclaration; import jolie.lang.parse.ast.OperationDeclaration; import jolie.lang.parse.ast.OutputPortInfo; import jolie.lang.parse.ast.PortInfo; import jolie.lang.parse.ast.Program; import jolie.lang.parse.ast.RequestResponseOperationDeclaration; import jolie.lang.parse.ast.types.TypeDefinition; import jolie.lang.parse.ast.types.TypeDefinitionLink; import jolie.lang.parse.ast.types.TypeInlineDefinition; import jolie.lang.parse.util.ParsingUtils; import jolie.lang.parse.util.ProgramInspector; import jolie.runtime.FaultException; import jolie.runtime.JavaService; import jolie.runtime.Value; import jolie.runtime.ValueVector; import jolie.runtime.embedding.RequestResponse; import jolie.util.Range; /** * * @author claudio guidi */ public class MetaJolie extends JavaService { private int MAX_CARD = 2147483647; private Value getNativeType(NativeType type) { Value response = Value.create(); if (type == NativeType.ANY) { response.getFirstChild("any_type").setValue(true); } else if (type == NativeType.STRING) { response.getFirstChild("string_type").setValue(true); } else if (type == NativeType.DOUBLE) { response.getFirstChild("double_type").setValue(true); } else if (type == NativeType.INT) { response.getFirstChild("int_type").setValue(true); } else if (type == NativeType.VOID) { response.getFirstChild("void_type").setValue(true); } else if 
(type == NativeType.BOOL) { response.getFirstChild("bool_type").setValue(true); } else if (type == NativeType.LONG) { response.getFirstChild("long_type").setValue(true); } else if (type == NativeType.RAW) { response.getFirstChild("raw_type").setValue(true); } return response; } private boolean isNativeType(String type) { if (type.equals("any") || type.equals("string") || type.equals("double") || type.equals("int") || type.equals("void") || type.equals("raw") //|| type.equals("undefined") || type.equals("any") || type.equals("bool") || type.equals("long")) { return true; } else { return false; } } private Value addCardinality(Range range) { Value response = Value.create(); response.getFirstChild("min").setValue(range.min()); if (range.max() == MAX_CARD) { response.getFirstChild("infinite").setValue(1); } else { response.getFirstChild("max").setValue(range.max()); } return response; } private Value addTypeInLine(ArrayList<TypeDefinition> types, ValueVector types_vector, Value name, TypeDefinition typedef) { Value response = Value.create(); response.getFirstChild("name").getFirstChild("name").setValue(typedef.id()); // not useful, inserted for respecting Type if (typedef instanceof TypeDefinitionLink) { response.getFirstChild("root_type").getFirstChild("link").setValue(((TypeDefinitionLink) typedef).linkedTypeName()); insertType(types, types_vector, name, ((TypeDefinitionLink) typedef).linkedType()); } else { TypeInlineDefinition td = (TypeInlineDefinition) typedef; response.getFirstChild("root_type").deepCopy(getNativeType(typedef.nativeType())); if (td.hasSubTypes()) { for (Entry<String, TypeDefinition> entry : td.subTypes()) { response.getChildren("sub_type").add(addSubType(types, types_vector, name, entry.getValue())); } } } return response; } private Value addSubType(ArrayList<TypeDefinition> types, ValueVector types_vector, Value name, TypeDefinition type) { Value response = Value.create(); response.getFirstChild("name").setValue(type.id()); 
response.getFirstChild("cardinality").deepCopy(addCardinality(type.cardinality())); if (type instanceof TypeDefinitionLink) { response.getFirstChild("type_link").deepCopy(setName(name)); response.getFirstChild("type_link").getFirstChild("name").setValue(((TypeDefinitionLink) type).linkedTypeName()); insertType(types, types_vector, name, ((TypeDefinitionLink) type).linkedType()); } else { response.getFirstChild("type_inline").deepCopy(addTypeInLine(types, types_vector, name, type)); } return response; } private void insertExtendedType(ArrayList<TypeDefinition> types, ValueVector types_vector, Value name, TypeDefinition typedef, TypeDefinition extension) { // to be optimized, similar code with addType if (!types.contains(typedef) && !isNativeType(typedef.id()) && !typedef.id().equals("undefined")) { types.add(typedef); Value type = Value.create(); if (typedef instanceof TypeDefinitionLink) { type.getFirstChild("name").deepCopy(setName(name)); type.getFirstChild("name").getFirstChild("name").setValue(typedef.id()); type.getFirstChild("root_type").getFirstChild("link").getFirstChild("name").setValue(((TypeDefinitionLink) typedef).linkedTypeName()); insertExtendedType(types, types_vector, name, ((TypeDefinitionLink) typedef).linkedType(), extension); } else { TypeInlineDefinition td = (TypeInlineDefinition) typedef; type.getFirstChild("name").deepCopy(setName(name)); type.getFirstChild("name").getFirstChild("name").setValue(td.id()); type.getFirstChild("root_type").deepCopy(getNativeType(td.nativeType())); if (td.hasSubTypes()) { int subtype_counter = 0; for (Entry<String, TypeDefinition> entry : td.subTypes()) { type.getChildren("sub_type").get(subtype_counter).deepCopy(addSubType(types, types_vector, name, entry.getValue())); subtype_counter++; } } // adding extension if (extension != null && extension.hasSubTypes()) { int subtype_counter = type.getChildren("sub_type").size(); for (Entry<String, TypeDefinition> entry : extension.subTypes()) { 
type.getChildren("sub_type").get(subtype_counter).deepCopy(addSubType(types, types_vector, name, entry.getValue())); subtype_counter++; } } } types_vector.add(type); } } private void insertType(ArrayList<TypeDefinition> types, ValueVector types_vector, Value name, TypeDefinition typedef) { // to be optimized, similar code with addType if (!types.contains(typedef) && !isNativeType(typedef.id()) && !typedef.id().equals("undefined")) { types.add(typedef); Value type = Value.create(); if (typedef instanceof TypeDefinitionLink) { type.getFirstChild("name").deepCopy(setName(name)); type.getFirstChild("name").getFirstChild("name").setValue(typedef.id()); type.getFirstChild("root_type").getFirstChild("link").getFirstChild("name").setValue(((TypeDefinitionLink) typedef).linkedTypeName()); insertType(types, types_vector, name, ((TypeDefinitionLink) typedef).linkedType()); } else { TypeInlineDefinition td = (TypeInlineDefinition) typedef; type.getFirstChild("name").deepCopy(setName(name)); type.getFirstChild("name").getFirstChild("name").setValue(td.id()); type.getFirstChild("root_type").deepCopy(getNativeType(td.nativeType())); if (td.hasSubTypes()) { int subtype_counter = 0; for (Entry<String, TypeDefinition> entry : td.subTypes()) { type.getChildren("sub_type").get(subtype_counter).deepCopy(addSubType(types, types_vector, name, entry.getValue())); subtype_counter++; } } } types_vector.add(type); } } private Value getSubType(TypeDefinition type, Value name) { Value response = Value.create(); response.getFirstChild("name").setValue(type.id()); response.getFirstChild("cardinality").deepCopy(addCardinality(type.cardinality())); if (type instanceof TypeDefinitionLink) { response.getFirstChild("type_link").deepCopy(setName(name)); response.getFirstChild("type_link").getFirstChild("name").setValue(((TypeDefinitionLink) type).linkedTypeName()); } else { response.getFirstChild("type_inline").deepCopy(getType(type, name)); } return response; } private Value getType(TypeDefinition 
typedef, Value name) { Value type = Value.create(); type.getFirstChild("name").deepCopy(setName(name)); type.getFirstChild("name").getFirstChild("name").setValue(typedef.id()); if (typedef instanceof TypeDefinitionLink) { type.getFirstChild("root_type").getFirstChild("link").getFirstChild("name").setValue(((TypeDefinitionLink) typedef).linkedTypeName()); if (name.getFirstChild("domain").isDefined()) { type.getFirstChild("root_type").getFirstChild("link").getFirstChild("domain").setValue(name.getFirstChild("domain").strValue()); } } else { TypeInlineDefinition td = (TypeInlineDefinition) typedef; type.getFirstChild("root_type").deepCopy(getNativeType(td.nativeType())); if (td.hasSubTypes()) { int subtype_counter = 0; for (Entry<String, TypeDefinition> entry : td.subTypes()) { type.getChildren("sub_type").get(subtype_counter).deepCopy(getSubType(entry.getValue(), name)); subtype_counter++; } } } return type; } private List<TypeDefinition> addType(List<TypeDefinition> types, TypeDefinition typedef) { if (!typedef.id().equals("undefined")) { if (!types.contains(typedef) && !isNativeType(typedef.id())) { types.add(typedef); if (typedef instanceof TypeDefinitionLink) { addType(types, ((TypeDefinitionLink) typedef).linkedType()); } else { TypeInlineDefinition td = (TypeInlineDefinition) typedef; if (td.hasSubTypes()) { for (Entry<String, TypeDefinition> entry : td.subTypes()) { addSubType(types, entry.getValue()); } } } } } return types; } private List<TypeDefinition> addSubType(List<TypeDefinition> types, TypeDefinition subtype) { if (subtype instanceof TypeDefinitionLink) { addType(types, ((TypeDefinitionLink) subtype).linkedType()); } else { TypeInlineDefinition td = (TypeInlineDefinition) subtype; if (td.hasSubTypes()) { for (Entry<String, TypeDefinition> entry : td.subTypes()) { addSubType(types, entry.getValue()); } } } return types; } private Value getInterface(InterfaceDefinition intf, Value name, List<TypeDefinition> types) { Value response = Value.create(); // 
setting the name response.getFirstChild("name").deepCopy(setName(name)); response.getFirstChild("name").getFirstChild("name").setValue(intf.name()); ValueVector operations = response.getChildren("operations"); // scans operations and types Map< String, OperationDeclaration> operationMap = intf.operationsMap(); for (Entry< String, OperationDeclaration> operationEntry : operationMap.entrySet()) { Value current_operation = Value.create(); if (operationEntry.getValue() instanceof OneWayOperationDeclaration) { OneWayOperationDeclaration oneWayOperation = (OneWayOperationDeclaration) operationEntry.getValue(); current_operation.getFirstChild("operation_name").setValue(oneWayOperation.id()); current_operation.getFirstChild("input").deepCopy(setName(name)); current_operation.getFirstChild("input").getFirstChild("name").setValue(oneWayOperation.requestType().id()); if (!isNativeType(oneWayOperation.requestType().id())) { addType(types, oneWayOperation.requestType()); } } else { RequestResponseOperationDeclaration requestResponseOperation = (RequestResponseOperationDeclaration) operationEntry.getValue(); current_operation.getFirstChild("operation_name").setValue(requestResponseOperation.id()); current_operation.getFirstChild("input").deepCopy(setName(name)); current_operation.getFirstChild("input").getFirstChild("name").setValue(requestResponseOperation.requestType().id()); current_operation.getFirstChild("output").deepCopy(setName(name)); current_operation.getFirstChild("output").getFirstChild("name").setValue(requestResponseOperation.responseType().id()); if (!isNativeType(requestResponseOperation.requestType().id())) { addType(types, requestResponseOperation.requestType()); } if (!isNativeType(requestResponseOperation.responseType().id())) { addType(types, requestResponseOperation.responseType()); } Map<String, TypeDefinition> faults = requestResponseOperation.faults(); int faultCounter = 0; for (Entry<String, TypeDefinition> f : faults.entrySet()) { 
// NOTE(review): continuation of a method that begins before this chunk — it is
// finishing the per-fault entries of the current operation, then closing its
// loops and returning the built response. Left byte-identical.
current_operation.getChildren("fault").get(faultCounter).getFirstChild("name").getFirstChild("name").setValue(f.getKey());
if (f.getValue() != null) {
    // fault carries a typed payload: record its (possibly qualified) type name
    current_operation.getChildren("fault").get(faultCounter).getFirstChild("type_name").deepCopy(setName(name));
    current_operation.getChildren("fault").get(faultCounter).getFirstChild("type_name").getFirstChild("name").setValue(f.getValue().id());
    if (!isNativeType(f.getValue().id())) {
        // user-defined fault type: collect it for later emission
        addType(types, f.getValue());
    }
}
faultCounter++;
}
}
operations.add(current_operation);
}
return response;
}

/**
 * Adds {@code intf} to {@code list} unless an equal element is already
 * present, and returns the (same, mutated) list.
 */
private List<InterfaceDefinition> addInterfaceToList(List<InterfaceDefinition> list, InterfaceDefinition intf) {
    if (!list.contains(intf)) {
        list.add(intf);
    }
    return list;
}

/** Builds a fresh value with a single "name" child set to {@code name}. */
private Value setName(String name) {
    Value v = Value.create();
    v.getFirstChild("name").setValue(name);
    return v;
}

/** As {@link #setName(String)}, additionally setting the "domain" child. */
private Value setName(String name, String domain) {
    Value v = setName(name);
    v.getFirstChild("domain").setValue(domain);
    return v;
}

/** As {@link #setName(String, String)}, additionally setting the "registry" child. */
private Value setName(String name, String domain, String registry) {
    Value v = setName(name, domain);
    v.getFirstChild("registry").setValue(registry);
    return v;
}

/**
 * Copies a structured name value, propagating the optional "domain" and
 * "registry" children only when they are defined on the input.
 */
private Value setName(Value name) {
    Value v;
    if (name.getFirstChild("domain").isDefined() && name.getFirstChild("registry").isDefined()) {
        v = setName(name.getFirstChild("name").strValue(), name.getFirstChild("domain").strValue(), name.getFirstChild("registry").strValue());
    } else if (name.getFirstChild("domain").isDefined() && !name.getFirstChild("registry").isDefined()) {
        v = setName(name.getFirstChild("name").strValue(), name.getFirstChild("domain").strValue());
    } else {
        v = setName(name.getFirstChild("name").strValue());
    }
    return v;
}

/**
 * Describes an input port (name, location, protocol, interfaces), including
 * interfaces aggregated from the given output ports, possibly rewritten by
 * an interface extender.
 */
private Value getInputPort(InputPortInfo portInfo, Value name, OutputPortInfo[] outputPortList) {
    Value response = Value.create();
    response.getFirstChild("name").deepCopy(setName(name));
    // setting the name of the port
    response.getFirstChild("name").getFirstChild("name").setValue(portInfo.id());
    // NOTE(review): cast is redundant — portInfo is already an InputPortInfo
    InputPortInfo port = (InputPortInfo) portInfo;
    response.getFirstChild("location").setValue(port.location().toString());
    if (port.protocolId() != null) {
        response.getFirstChild("protocol").setValue(port.protocolId());
    } else {
        response.getFirstChild("protocol").setValue("");
    }
    // scan all the interfaces of the inputPort
    for (int intf_index = 0; intf_index < portInfo.getInterfaceList().size(); intf_index++) {
        InterfaceDefinition interfaceDefinition = portInfo.getInterfaceList().get(intf_index);
        Value input_interface = response.getChildren("interfaces").get(intf_index);
        addInterfaceToPortInfo(input_interface, interfaceDefinition, name);
    }
    // scanning aggregation
    // extracts interfaces from aggregated outputPorts
    for (int x = 0; x < portInfo.aggregationList().length; x++) {
        // linear scan for the aggregated output port by id
        // NOTE(review): assumes a match exists — otherwise this runs past the array end
        int i = 0;
        while (!portInfo.aggregationList()[x].outputPortList()[0].equals(outputPortList[i].id())) {
            i++;
        }
        int curItfIndex = response.getChildren("interfaces").size();
        InterfaceExtenderDefinition extender = null;
        OneWayOperationDeclaration owExtender = null;
        RequestResponseOperationDeclaration rrExtender = null;
        if (portInfo.aggregationList()[x].interfaceExtender() != null) {
            // the interfaces of the outputPort must be extended
            // only default extension is processed. TODO: extending also specific operation declaration
            extender = portInfo.aggregationList()[x].interfaceExtender();
            if (extender.defaultOneWayOperation() != null) {
                owExtender = extender.defaultOneWayOperation();
            }
            if (extender.defaultRequestResponseOperation() != null) {
                rrExtender = extender.defaultRequestResponseOperation();
            }
        }
        for (InterfaceDefinition interfaceDefinition : outputPortList[i].getInterfaceList()) {
            Value inputInterface = response.getChildren("interfaces").get(curItfIndex);
            if (extender != null) {
                addExtendedInterfaceToPortInfo(inputInterface, interfaceDefinition, name, owExtender, rrExtender);
            } else {
                addInterfaceToPortInfo(inputInterface, interfaceDefinition, name);
            }
            curItfIndex++;
        }
    }
    return response;
}

/**
 * As {@link #getInputPort(InputPortInfo, Value, OutputPortInfo[])}, but only
 * records interface names on the port value and accumulates the interface
 * definitions (own and aggregated) into {@code interfaces}.
 */
private Value getInputPort(InputPortInfo portInfo, Value name, OutputPortInfo[] outputPortList, List<InterfaceDefinition> interfaces) {
    Value response = Value.create();
    response.getFirstChild("name").deepCopy(setName(name));
    // setting the name of the port
    response.getFirstChild("name").getFirstChild("name").setValue(portInfo.id());
    InputPortInfo port = (InputPortInfo) portInfo;
    response.getFirstChild("location").setValue(port.location().toString());
    if (port.protocolId() != null) {
        response.getFirstChild("protocol").setValue(port.protocolId());
    } else {
        response.getFirstChild("protocol").setValue("");
    }
    // scan all the interfaces of the inputPort
    for (int intf_index = 0; intf_index < portInfo.getInterfaceList().size(); intf_index++) {
        InterfaceDefinition interfaceDefinition = portInfo.getInterfaceList().get(intf_index);
        response.getChildren("interfaces").get(intf_index).getFirstChild("name").deepCopy(setName(name));
        response.getChildren("interfaces").get(intf_index).getFirstChild("name").getFirstChild("name").setValue(interfaceDefinition.name());
        addInterfaceToList(interfaces, interfaceDefinition);
    }
    // scanning aggregation
    // extracts interfaces from aggregated outputPorts
    for (int x = 0; x < portInfo.aggregationList().length; x++) {
        int i =
0; // (review) continues "int i =" from the previous chunk: linear scan for the aggregated port
while (!portInfo.aggregationList()[x].outputPortList()[0].equals(outputPortList[i].id())) {
    i++;
}
int intf = response.getChildren("interfaces").size();
for (InterfaceDefinition interfaceDefinition : outputPortList[i].getInterfaceList()) {
    // append the aggregated interface's (qualified) name and remember its definition
    response.getChildren("interfaces").get(intf).getFirstChild("name").deepCopy(setName(name));
    response.getChildren("interfaces").get(intf).getFirstChild("name").getFirstChild("name").setValue(interfaceDefinition.name());
    addInterfaceToList(interfaces, interfaceDefinition);
    intf++;
}
}
return response;
}

/**
 * Describes an output port (name, location, protocol) and expands each of
 * its interfaces in place via {@link #addInterfaceToPortInfo}.
 */
private Value getOutputPort(OutputPortInfo portInfo, Value name) {
    Value response = Value.create();
    response.getFirstChild("name").deepCopy(setName(name));
    // setting the name of the port
    response.getFirstChild("name").getFirstChild("name").setValue(portInfo.id());
    // NOTE(review): cast is redundant — portInfo is already an OutputPortInfo
    OutputPortInfo port = (OutputPortInfo) portInfo;
    response.getFirstChild("location").setValue(port.location().toString());
    if (port.protocolId() != null) {
        response.getFirstChild("protocol").setValue(port.protocolId());
    } else {
        response.getFirstChild("protocol").setValue("");
    }
    // scan all the interfaces of the inputPort
    for (int intf_index = 0; intf_index < portInfo.getInterfaceList().size(); intf_index++) {
        InterfaceDefinition interfaceDefinition = portInfo.getInterfaceList().get(intf_index);
        Value input_interface = response.getChildren("interfaces").get(intf_index);
        addInterfaceToPortInfo(input_interface, interfaceDefinition, name);
    }
    return response;
}

/**
 * Fills {@code input_interface} with the operations and types of
 * {@code interfaceDefinition}, applying the aggregation extender's default
 * one-way / request-response declarations to non-native message types.
 */
private void addExtendedInterfaceToPortInfo(
        Value input_interface, InterfaceDefinition interfaceDefinition, Value name,
        OneWayOperationDeclaration owExtender, RequestResponseOperationDeclaration rrExtender) {
    ArrayList<TypeDefinition> types = new ArrayList<TypeDefinition>();
    input_interface.getFirstChild("name").deepCopy(setName(name));
    input_interface.getFirstChild("name").getFirstChild("name").setValue(interfaceDefinition.name());
    ValueVector operations = input_interface.getChildren("operations");
    ValueVector interface_types = input_interface.getChildren("types");
    // scans operations and types
    Map< String, OperationDeclaration> operationMap = interfaceDefinition.operationsMap();
    for (Entry< String, OperationDeclaration> operationEntry : operationMap.entrySet()) {
        Value current_operation = Value.create();; // NOTE(review): stray extra ';' — harmless empty statement
        if (operationEntry.getValue() instanceof OneWayOperationDeclaration) {
            OneWayOperationDeclaration oneWayOperation = (OneWayOperationDeclaration) operationEntry.getValue();
            current_operation.getFirstChild("operation_name").setValue(oneWayOperation.id());
            current_operation.getFirstChild("input").deepCopy(setName(name));
            current_operation.getFirstChild("input").getFirstChild("name").setValue(oneWayOperation.requestType().id());
            if (!isNativeType(oneWayOperation.requestType().id())) {
                // NOTE(review): owExtender may be null when the extender declares no
                // default one-way operation — TODO confirm against the caller
                insertExtendedType(types, interface_types, name, oneWayOperation.requestType(), owExtender.requestType());
            }
        } else {
            RequestResponseOperationDeclaration requestResponseOperation = (RequestResponseOperationDeclaration) operationEntry.getValue();
            current_operation.getFirstChild("operation_name").setValue(requestResponseOperation.id());
            current_operation.getFirstChild("input").deepCopy(setName(name));
            current_operation.getFirstChild("input").getFirstChild("name").setValue(requestResponseOperation.requestType().id());
            current_operation.getFirstChild("output").deepCopy(setName(name));
            current_operation.getFirstChild("output").getFirstChild("name").setValue(requestResponseOperation.responseType().id());
            if (!isNativeType(requestResponseOperation.requestType().id())) {
                insertExtendedType(types, interface_types, name, requestResponseOperation.requestType(), rrExtender.requestType());
            }
            if (!isNativeType(requestResponseOperation.responseType().id())) {
                insertExtendedType(types, interface_types, name, requestResponseOperation.responseType(), rrExtender.responseType());
            }
            Map<String, TypeDefinition> faults = requestResponseOperation.faults();
            int faultCounter = 0;
            for (Entry<String, TypeDefinition> f :
                    faults.entrySet()) {
                current_operation.getChildren("fault").get(faultCounter).getFirstChild("name").getFirstChild("name").setValue(f.getKey());
                if (f.getValue() != null) {
                    current_operation.getChildren("fault").get(faultCounter).getFirstChild("type_name").deepCopy(setName(name));
                    current_operation.getChildren("fault").get(faultCounter).getFirstChild("type_name").getFirstChild("name").setValue(f.getValue().id());
                    if (!isNativeType(f.getValue().id())) {
                        insertExtendedType(types, interface_types, name, f.getValue(), rrExtender.faults().get( f.getKey() ));
                    }
                }
                faultCounter++;
            }
        }
        operations.add(current_operation);
    }
}

/**
 * Fills {@code input_interface} with the operations and types of
 * {@code interfaceDefinition} (no extender applied); non-native message
 * types are emitted via {@code insertType}.
 */
private void addInterfaceToPortInfo(Value input_interface, InterfaceDefinition interfaceDefinition, Value name) {
    ArrayList<TypeDefinition> types = new ArrayList<TypeDefinition>();
    input_interface.getFirstChild("name").deepCopy(setName(name));
    input_interface.getFirstChild("name").getFirstChild("name").setValue(interfaceDefinition.name());
    ValueVector operations = input_interface.getChildren("operations");
    ValueVector interface_types = input_interface.getChildren("types");
    // scans operations and types
    Map< String, OperationDeclaration> operationMap = interfaceDefinition.operationsMap();
    for (Entry< String, OperationDeclaration> operationEntry : operationMap.entrySet()) {
        Value current_operation = Value.create();; // NOTE(review): stray extra ';' — harmless empty statement
        if (operationEntry.getValue() instanceof OneWayOperationDeclaration) {
            OneWayOperationDeclaration oneWayOperation = (OneWayOperationDeclaration) operationEntry.getValue();
            current_operation.getFirstChild("operation_name").setValue(oneWayOperation.id());
            current_operation.getFirstChild("input").deepCopy(setName(name));
            current_operation.getFirstChild("input").getFirstChild("name").setValue(oneWayOperation.requestType().id());
            if (!isNativeType(oneWayOperation.requestType().id())) {
                insertType(types, interface_types, name, oneWayOperation.requestType());
            }
        } else {
            RequestResponseOperationDeclaration requestResponseOperation =
(RequestResponseOperationDeclaration) operationEntry.getValue(); // (review) completes the declaration started in the previous chunk
current_operation.getFirstChild("operation_name").setValue(requestResponseOperation.id());
current_operation.getFirstChild("input").deepCopy(setName(name));
current_operation.getFirstChild("input").getFirstChild("name").setValue(requestResponseOperation.requestType().id());
current_operation.getFirstChild("output").deepCopy(setName(name));
current_operation.getFirstChild("output").getFirstChild("name").setValue(requestResponseOperation.responseType().id());
if (!isNativeType(requestResponseOperation.requestType().id())) {
    insertType(types, interface_types, name, requestResponseOperation.requestType());
}
if (!isNativeType(requestResponseOperation.responseType().id())) {
    insertType(types, interface_types, name, requestResponseOperation.responseType());
}
Map<String, TypeDefinition> faults = requestResponseOperation.faults();
int faultCounter = 0;
for (Entry<String, TypeDefinition> f : faults.entrySet()) {
    current_operation.getChildren("fault").get(faultCounter).getFirstChild("name").getFirstChild("name").setValue(f.getKey());
    if (f.getValue() != null) {
        current_operation.getChildren("fault").get(faultCounter).getFirstChild("type_name").deepCopy(setName(name));
        current_operation.getChildren("fault").get(faultCounter).getFirstChild("type_name").getFirstChild("name").setValue(f.getValue().id());
        if (!isNativeType(f.getValue().id())) {
            insertType(types, interface_types, name, f.getValue());
        }
    }
    faultCounter++;
}
}
operations.add(current_operation);
}
}

/**
 * Describes a generic port (input or output): name, location (falling back
 * to "local"), protocol, and interface names; collects each interface
 * definition into {@code interfaces}.
 */
private Value getPort(PortInfo portInfo, Value name, List<InterfaceDefinition> interfaces) {
    Value response = Value.create();
    // setting domain and registry from request
    response.getFirstChild("name").deepCopy(setName(name));
    // setting the name of the port
    response.getFirstChild("name").getFirstChild("name").setValue(portInfo.id());
    // NOTE(review): the two branches below are identical except for the cast —
    // InputPortInfo and OutputPortInfo apparently share no common location/protocol supertype
    if (portInfo instanceof InputPortInfo) {
        InputPortInfo port = (InputPortInfo) portInfo;
        if (port.location() != null) {
            response.getFirstChild("location").setValue(port.location().toString());
        } else {
            response.getFirstChild("location").setValue("local");
        }
        if (port.protocolId() != null) {
            response.getFirstChild("protocol").setValue(port.protocolId());
        } else {
            response.getFirstChild("protocol").setValue("");
        }
    } else if (portInfo instanceof OutputPortInfo) {
        OutputPortInfo port = (OutputPortInfo) portInfo;
        if (port.location() != null) {
            response.getFirstChild("location").setValue(port.location().toString());
        } else {
            response.getFirstChild("location").setValue("local");
        }
        if (port.protocolId() != null) {
            response.getFirstChild("protocol").setValue(port.protocolId());
        } else {
            response.getFirstChild("protocol").setValue("");
        }
    }
    // scans interfaces
    List<InterfaceDefinition> interfaceList = portInfo.getInterfaceList();
    for (int intf = 0; intf < interfaceList.size(); intf++) {
        InterfaceDefinition interfaceDefinition = portInfo.getInterfaceList().get(intf);
        // setting the name of the interface within the port response
        response.getChildren("interfaces").get(intf).getFirstChild("name").deepCopy(setName(name));
        response.getChildren("interfaces").get(intf).getFirstChild("name").getFirstChild("name").setValue(interfaceDefinition.name());
        interfaces = addInterfaceToList(interfaces, interfaceDefinition);
    }
    return response;
}

/**
 * Builds the argv for the embedded Jolie parser: the target filename
 * followed by a "-i" / include-path pair for every interpreter include path.
 */
private String[] getArgs( String filename ) {
    Interpreter interpreter = Interpreter.getInstance();
    String[] interpreterIncludePaths = interpreter.includePaths();
    String[] includePaths = new String[ interpreterIncludePaths.length * 2 + 1 ];
    includePaths[ 0 ] = filename;
    for( int i = 0; i < interpreterIncludePaths.length; i++ ) {
        includePaths[ i*2 + 1 ] = "-i";
        includePaths[ i*2 + 2 ] = interpreterIncludePaths[ i ];
    }
    return includePaths;
}

/** Operation: reports whether the requested type name is a Jolie native type. */
@RequestResponse
public Value checkNativeType(Value request) {
    Value response = Value.create();
    response.getFirstChild("result").setValue(isNativeType(request.getFirstChild("type_name").strValue()));
    return response;
}

/**
 * Operation: parses the given program and describes its first input port and
 * first output port. Parse errors are only printed, not reported to the caller.
 */
@RequestResponse
public Value parseRoles(Value request) {
    Value response = Value.create();
    try {
        response.getFirstChild("name").deepCopy(setName(request.getFirstChild("rolename")));
        String[] args = getArgs( request.getFirstChild("filename").strValue() );
        // NOTE(review): cmdParser is never closed on this path — possible resource leak
        // (getInputPortMetaData does call close())
        CommandLineParser cmdParser = new CommandLineParser(args, MetaJolie.class.getClassLoader());
        args = cmdParser.arguments();
        Program program = ParsingUtils.parseProgram(
                cmdParser.programStream(),
                URI.create("file:" + cmdParser.programFilepath()),
                cmdParser.includePaths(), MetaJolie.class.getClassLoader(), cmdParser.definedConstants());
        ProgramInspector inspector = ParsingUtils.createInspector(program);
        URI originalFile = program.context().source();
        // scanning first inputport
        InputPortInfo[] inputPortList = inspector.getInputPorts(originalFile);
        Value input = response.getFirstChild("input");
        if (inputPortList.length > 0) {
            InputPortInfo inputPort = inputPortList[0];
            input.deepCopy(getInputPort(inputPort, request.getFirstChild("name"), inspector.getOutputPorts()));
        }
        // scanning first outputPort if it exists
        OutputPortInfo[] outputPortList = inspector.getOutputPorts();
        if (outputPortList.length > 0) {
            Value output = response.getFirstChild("output");
            output.deepCopy(getOutputPort(outputPortList[0], request.getFirstChild("name")));
        }
    } catch (CommandLineException e) {
        // TO DO
        e.printStackTrace();
    } catch (IOException e) {
        // TO DO
        e.printStackTrace();
    } catch (ParserException e) {
        // TO DO
        e.printStackTrace();
    } catch (SemanticException e) {
        // TO DO
        e.printStackTrace();
    }
    return response;
}

/**
 * Operation: full metadata of a program — services, ports, interfaces, types
 * and embedded services. Throws ParserException/SemanticException faults with
 * source positions; other failures are silently swallowed (see note below).
 */
@RequestResponse
public Value getMetaData(Value request) throws FaultException {
    String domain = "";
    List<TypeDefinition> types = new ArrayList<TypeDefinition>();
    List<InterfaceDefinition> interfaces = new ArrayList<InterfaceDefinition>();
    Value response = Value.create();
    try {
        String[] args = getArgs( request.getFirstChild("filename").strValue() );
        if (request.getFirstChild("name").getFirstChild("domain").isDefined()) {
            domain =
request.getFirstChild("name").getFirstChild("domain").strValue(); // (review) completes "domain =" from the previous chunk
}
CommandLineParser cmdParser = new CommandLineParser(args, MetaJolie.class.getClassLoader());
args = cmdParser.arguments();
Program program = ParsingUtils.parseProgram(
        cmdParser.programStream(),
        URI.create("file:" + cmdParser.programFilepath()),
        cmdParser.includePaths(), MetaJolie.class.getClassLoader(), cmdParser.definedConstants());
ProgramInspector inspector = ParsingUtils.createInspector(program);
URI originalFile = program.context().source();
cmdParser.close();
response.getFirstChild("service").getFirstChild("name").deepCopy(setName(request.getFirstChild("name")));
// describe every output port and mirror its name/domain under "service"
OutputPortInfo[] outputPortList = inspector.getOutputPorts();
if (outputPortList.length > 0) {
    ValueVector output = response.getChildren("output");
    for (int op = 0; op < outputPortList.length; op++) {
        OutputPortInfo outputPort = outputPortList[ op];
        output.get(op).deepCopy(getPort(outputPort, request.getFirstChild("name"), interfaces));
        response.getFirstChild("service").getChildren("output").get(op).getFirstChild("name").setValue(outputPort.id());
        response.getFirstChild("service").getChildren("output").get(op).getFirstChild("domain").setValue(domain);
    }
}
// describe every input port of the original source file
InputPortInfo[] inputPortList = inspector.getInputPorts(originalFile);
ValueVector input = response.getChildren("input");
if (inputPortList.length > 0) {
    for (int ip = 0; ip < inputPortList.length; ip++) {
        InputPortInfo inputPort = inputPortList[ ip];
        input.get(ip).deepCopy(getInputPort(inputPort, request.getFirstChild("name"), outputPortList, interfaces));
        response.getFirstChild("service").getChildren("input").get(ip).getFirstChild("name").setValue(inputPort.id());
        response.getFirstChild("service").getChildren("input").get(ip).getFirstChild("domain").setValue(domain);
    }
}
// adding interfaces
for (int intf = 0; intf < interfaces.size(); intf++) {
    InterfaceDefinition interfaceDefinition = interfaces.get(intf);
    response.getChildren("interfaces").get(intf).deepCopy(getInterface(interfaceDefinition, request.getFirstChild("name"), types));
}
// adding types
for (int tp = 0; tp < types.size(); tp++) {
    TypeDefinition typeDefinition = types.get(tp);
    response.getChildren("types").get(tp).deepCopy(getType(typeDefinition, request.getFirstChild("name")));
}
// adding embedded services
EmbeddedServiceNode[] embeddedServices = inspector.getEmbeddedServices();
for (int es = 0; es < embeddedServices.length; es++) {
    response.getChildren("embeddedServices").get(es).getFirstChild("type").setValue(embeddedServices[ es].type().toString());
    response.getChildren("embeddedServices").get(es).getFirstChild("servicepath").setValue(embeddedServices[ es].servicePath());
    response.getChildren("embeddedServices").get(es).getFirstChild("portId").setValue(embeddedServices[ es].portId());
}
// NOTE(review): the two empty catches below silently swallow command-line and
// I/O failures, returning a partially-filled (or empty) response to the caller
} catch (CommandLineException e) {
} catch (IOException e) {
} catch (ParserException e) {
    Value fault = Value.create();
    fault.getFirstChild("message").setValue(e.getMessage());
    fault.getFirstChild("line").setValue(e.context().line());
    fault.getFirstChild("sourceName").setValue(e.context().sourceName());
    throw new FaultException("ParserException", fault);
} catch (SemanticException e) {
    // convert every semantic error into a fault entry with its source position
    Value fault = Value.create();
    int i = 0;
    for( SemanticException.SemanticError error : e.getErrorList() ) {
        fault.getChildren( "error").get( i ).getFirstChild("message").setValue(error.getMessage());
        fault.getChildren( "error").get( i ).getFirstChild("line").setValue(error.context().line());
        fault.getChildren( "error").get( i ).getFirstChild("sourceName").setValue(error.context().sourceName());
        i++;
    }
    throw new FaultException("SemanticException", fault);
}
return response;
}

/**
 * Operation: metadata of a program's input ports only. Unlike
 * {@link #getMetaData}, command-line and I/O failures are reported as
 * "InputPortMetaDataFault" faults instead of being swallowed.
 */
@RequestResponse
public Value getInputPortMetaData(Value request) throws FaultException {
    // NOTE(review): domain, types and interfaces are populated/declared but never
    // used by this operation — apparently copied from getMetaData
    String domain = "";
    List<TypeDefinition> types = new ArrayList<TypeDefinition>();
    List<InterfaceDefinition> interfaces = new ArrayList<InterfaceDefinition>();
    Value response = Value.create();
    try {
        String[] args = getArgs( request.getFirstChild("filename").strValue() );
        if (request.getFirstChild("domain").isDefined()) {
            domain = request.getFirstChild("domain").strValue();
        }
        CommandLineParser cmdParser = new CommandLineParser(args, MetaJolie.class.getClassLoader());
        args = cmdParser.arguments();
        Program program = ParsingUtils.parseProgram(
                cmdParser.programStream(),
                URI.create("file:" + cmdParser.programFilepath()),
                cmdParser.includePaths(), MetaJolie.class.getClassLoader(), cmdParser.definedConstants());
        ProgramInspector inspector = ParsingUtils.createInspector(program);
        URI originalFile = program.context().source();
        InputPortInfo[] inputPortList = inspector.getInputPorts(originalFile);
        ValueVector input = response.getChildren("input");
        if (inputPortList.length > 0) {
            for (int ip = 0; ip < inputPortList.length; ip++) {
                InputPortInfo inputPort = inputPortList[ ip];
                input.get(ip).deepCopy(getInputPort(inputPort, request.getFirstChild("name"), inspector.getOutputPorts()));
            }
        }
        cmdParser.close();
    } catch (CommandLineException e) {
        throw new FaultException("InputPortMetaDataFault", e);
    } catch (IOException e) {
        throw new FaultException("InputPortMetaDataFault", e);
    } catch (ParserException e) {
        Value fault = Value.create();
        fault.getFirstChild("message").setValue(e.getMessage());
        fault.getFirstChild("line").setValue(e.context().line());
        fault.getFirstChild("sourceName").setValue(e.context().sourceName());
        throw new FaultException("ParserException", fault);
    } catch (SemanticException e) {
        Value fault = Value.create();
        List<SemanticException.SemanticError> errorList = e.getErrorList();
        for( int i = 0; i < errorList.size(); i++ ) {
            fault.getChildren( "error").get( i ).getFirstChild("message").setValue(errorList.get(i).getMessage());
            fault.getChildren( "error").get( i ).getFirstChild("line").setValue(errorList.get(i).context().line());
            fault.getChildren( "error").get( i ).getFirstChild("sourceName").setValue(errorList.get( i
).context().sourceName()); // (review) completes the setValue(...) call started in the previous chunk
}
throw new FaultException("SemanticException", fault);
}
return response;
}

/**
 * Linear search of {@code types} for a type whose "name"/"domain" children
 * match the given name and domain.
 * NOTE(review): uses a raw Iterator, and when no element matches it falls
 * through and returns the LAST scanned element (index - 1) — presumably
 * callers guarantee the type exists; TODO confirm.
 */
private Value findType(ValueVector types, String typeName, String typeDomain) {
    Iterator iterator = types.iterator();
    boolean found = false;
    int index = 0;
    while (index < types.size() && !found) {
        Value type = (Value) iterator.next();
        String name = type.getFirstChild("name").getFirstChild("name").strValue();
        String domain = type.getFirstChild("name").getFirstChild("domain").strValue();
        if (name.equals(typeName) && domain.equals(typeDomain)) {
            found = true;
        }
        index++;
    }
    return types.get(index - 1);
}

/**
 * Validates and casts one named sub-field of a message against the matching
 * entry of {@code subTypes}, recursing into inline or linked type
 * definitions. Throws a "TypeMismatch" fault on unknown field or cardinality
 * violation.
 */
private void castingSubType(ValueVector subTypes, String elementName, ValueVector messageVector, ValueVector types, Value response) throws FaultException {
    boolean found = false;
    int index = 0;
    while (!found && index < subTypes.size()) {
        if (subTypes.get(index).getFirstChild("name").strValue().equals(elementName)) {
            found = true;
        }
        index++;
    }
    if (!found) {
        throw new FaultException("TypeMismatch");
    } else {
        Value subType = subTypes.get(index - 1);
        // check cardinality
        // NOTE(review): the min check reads child "Cardinality" (capital C) while the
        // max check reads "cardinality" — one of the two is almost certainly a bug
        if (messageVector.size() < subType.getFirstChild("Cardinality").getFirstChild("min").intValue()) {
            throw new FaultException("TypeMismatch");
        }
        if (subType.getFirstChild("cardinality").getChildren("max").size() > 0) {
            if (messageVector.size() > subType.getFirstChild("cardinality").getFirstChild("max").intValue()) {
                throw new FaultException("TypeMismatch");
            }
        }
        // casting all the elements
        for (int el = 0; el < messageVector.size(); el++) {
            if (subType.getChildren("type_inline").size() > 0) {
                castingType(subType.getFirstChild("type_inline"), messageVector.get(el), types, response.getChildren(elementName).get(el));
            } else if (subType.getChildren("type_link").size() > 0) {
                String name = subType.getFirstChild("type_link").getFirstChild("name").strValue();
                String domain = subType.getFirstChild("type_link").getFirstChild("domain").strValue();
                Value typeToCast = findType(types, name, domain);
                castingType(typeToCast, messageVector.get(el), types, response.getChildren(elementName).get(el));
            }
        }
    }
}

/**
 * Casts {@code message} into {@code response} according to the root type and
 * sub-types of {@code typeToCast}, following type links recursively.
 */
private void castingType(Value typeToCast, Value message, ValueVector types, Value response) throws FaultException {
    // casting root
    if (typeToCast.getFirstChild("root_type").getChildren("string_type").size() > 0) {
        response.setValue(message.strValue());
    }
    // NOTE(review): the "int_type" branch appears three times in this method —
    // the two later copies are redundant duplicates
    if (typeToCast.getFirstChild("root_type").getChildren("int_type").size() > 0) {
        response.setValue(message.intValue());
    }
    if (typeToCast.getFirstChild("root_type").getChildren("double_type").size() > 0) {
        response.setValue(message.doubleValue());
    }
    // any_type is cast to string
    if (typeToCast.getFirstChild("root_type").getChildren("any_type").size() > 0) {
        response.setValue(message.strValue());
    }
    if (typeToCast.getFirstChild("root_type").getChildren("int_type").size() > 0) {
        response.setValue(message.intValue());
    }
    // void: nothing to copy
    if (typeToCast.getFirstChild("root_type").getChildren("void_type").size() > 0) {
    }
    if (typeToCast.getFirstChild("root_type").getChildren("long_type").size() > 0) {
        response.setValue(message.longValue());
    }
    if (typeToCast.getFirstChild("root_type").getChildren("int_type").size() > 0) {
        response.setValue(message.intValue());
    }
    // linked root type: resolve the link and recurse
    if (typeToCast.getFirstChild("root_type").getChildren("link").size() > 0) {
        String domain = "";
        if (typeToCast.getFirstChild("root_type").getFirstChild("link").getChildren("domain").size() > 0) {
            domain = typeToCast.getFirstChild("root_type").getFirstChild("link").getFirstChild("domain").strValue();
        }
        Value linkRootType = findType(types, typeToCast.getFirstChild("root_type").getFirstChild("link").getFirstChild("name").strValue(), domain).getFirstChild("root_type");
        castingType(linkRootType, message, types, response);
    }
    // casting subTypes
    if (typeToCast.getChildren("sub_type").size() > 0) {
        // ranging over all the subfields of the message
        for (Entry<String, ValueVector> e : message.children().entrySet()) {
            castingSubType(typeToCast.getChildren("sub_type"), e.getKey(), message.getChildren(e.getKey()), types,
                    response);
        }
    }
}

/**
 * Operation: casts request.message to the type named by
 * request.types.messageTypeName, resolving against request.types.types.
 * Throws a "TypeMismatch" fault when the message does not conform.
 */
@RequestResponse
public Value messageTypeCast(Value request) throws FaultException {
    Value message = request.getFirstChild("message");
    String messageTypeName = request.getFirstChild("types").getFirstChild("messageTypeName").getFirstChild("name").strValue();
    String messageTypeDomain = request.getFirstChild("types").getFirstChild("messageTypeName").getFirstChild("domain").strValue();
    ValueVector types = request.getFirstChild("types").getChildren("types");
    Value response = Value.create();
    // get message type
    Value messageType = findType(types, messageTypeName, messageTypeDomain);
    // casting root node
    castingType(messageType, message, types, response.getFirstChild("message"));
    return response;
}
}
package org.mit.jstreamit; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.IdentityHashMap; import java.util.List; import java.util.Map; /** * * @author Jeffrey Bosboom <jeffreybosboom@gmail.com> * @since 11/8/2012 */ public final class Portal<I> { private final Class<I> klass; private final List<PrimitiveWorker<?, ?>> recipients = new ArrayList<>(); /** * sender -> (recipient -> constraint). */ private final Map<PrimitiveWorker<?, ?>, Map<PrimitiveWorker<?, ?>, MessageConstraint>> constraints = new IdentityHashMap<>(); public Portal(Class<I> klass) { if (!klass.isInterface()) throw new IllegalArgumentException(klass+" is not an interface type"); //TODO: are these checks too strict? The interpreter can check these //dynamically and the compiler can tell exactly which methods are called, //so we could be more lenient here. for (Method m : klass.getMethods()) { if (m.getDeclaringClass().equals(Object.class)) continue; if (!m.getReturnType().equals(void.class)) throw new IllegalArgumentException("Method "+m.toGenericString()+" in "+klass+" returns non-void"); //TODO: do we need m.getGenericExceptionTypes() to handle "throws E"? if (m.getExceptionTypes().length > 0) throw new IllegalArgumentException("Method "+m.toGenericString()+" in "+klass+" may throw"); } this.klass = klass; } public void addRecipient(I recipient) { //TODO: public <T extends PrimitiveWorker & I> void addRecipient(T recipient) if (recipient == null) throw new NullPointerException(); //I'm pretty sure this can only happen via unchecked casts or //incompatible class file changes, but we should check anyway. if (!klass.isInstance(recipient)) throw new IllegalArgumentException("Recipient "+recipient+" not instance of "+klass); //Messaging a non-worker doesn't make sense -- SDEP isn't defined. 
if (!(recipient instanceof PrimitiveWorker)) throw new IllegalArgumentException("Recipient "+recipient+" not instance of Filter, Splitter or Joiner"); recipients.add((PrimitiveWorker<?, ?>)recipient); } /** * Gets a handle from this portal which can be used to send messages to the * registered recipients. This method should only be called from the work() * function of a filter, splitter or joiner. * * The sender argument should always be the this reference of the filter, * splitter or joiner from which getHandle() is being called; other values * will result in strange behavior. Unfortunately, the Java language does * not allow enforcing this requirement. * * TODO briefly explain latency * * The returned handle appears to be an I instance, but is actually a magic * object that translates calls of I methods to messages. That is, calling a * method on the returned handle with some arguments results in that method * being invoked with those arguments on each of the recipients after the * specified latency. Only methods declared in I or a superinterface of I * may be invoked through the handle; Object methods may not be invoked. The * argument objects must not be modified until after all recipients have * received and processed their messages. Handles should not be stored in * local variables or fields. * * Implementation note: this is a JIT hook method. * * TODO: PrimitiveWorker is package-private and we're * leaking it into the public API here. Alternatives: * --create three overloads of getHandle: Filter, Splitter and Joiner * --just make PrimitiveWorker public (probably renamed to Worker and with * all the Channel/predecessor/successor stuff still package-private) * * TODO: latency ranges? 
* @param sender the message sender * @param latency the message latency * @return an I whose calls generate messages */ public I getHandle(PrimitiveWorker<?, ?> sender, int latency) { if (sender == null) throw new NullPointerException(); Handle handler = new Handle(sender, recipients, latency, constraints.get(sender)); @SuppressWarnings("unchecked") I handle = (I)Proxy.newProxyInstance(klass.getClassLoader(), new Class<?>[]{klass}, handler); return handle; } /** * Gets the list of registered recipients. MessageConstraint needs this. * @return the list of registered recipients */ /* package-private */ List<PrimitiveWorker<?, ?>> getRecipients() { List<PrimitiveWorker<?, ?>> retval = Collections.unmodifiableList(recipients); return retval; } /** * Fills in our constraints map from the list of all constraints in the * graph. (We only remember constraints about us.) Called by the * interpreter after finding the constraints and before execution begins. */ /* package-private */ void setConstraints(List<MessageConstraint> allConstraints) { for (MessageConstraint c : allConstraints) { if (c.getPortal() != this) continue; Map<PrimitiveWorker<?, ?>, MessageConstraint> senderMap = constraints.get(c.getSender()); if (senderMap == null) { senderMap = new IdentityHashMap<>(); constraints.put(c.getSender(), senderMap); } senderMap.put(c.getRecipient(), c); } } /* package-private */ static class Message implements Comparable<Message> { public final Method method; public final Object[] args; /** * The execution immediately before which this message will be received. 
*/ public long timeToReceive; Message(Method method, Object[] args, long timeToReceive) { this.method = method; this.args = args; this.timeToReceive = timeToReceive; } @Override public int compareTo(Message o) { return Long.compare(timeToReceive, o.timeToReceive); } @Override public String toString() { String argString = Arrays.toString(args); argString = argString.substring(1, argString.length()-1); return method.getDeclaringClass().getSimpleName()+"."+method.getName()+"("+argString+") at "+timeToReceive; } } /** * The back-end of the dynamic proxy created in getHandle() (strictly * speaking, this isn't the actual handle object, but HandleHandler seemed * like a dumb name). */ private static class Handle implements InvocationHandler { private final PrimitiveWorker<?, ?> sender; private final List<PrimitiveWorker<?, ?>> recipients; private final int latency; /** * Maps recipients to constraints for this sender. */ private final Map<PrimitiveWorker<?, ?>, MessageConstraint> constraints; private Handle(PrimitiveWorker<?, ?> sender, List<PrimitiveWorker<?, ?>> recipients, int latency, Map<PrimitiveWorker<?, ?>, MessageConstraint> constraints) { this.sender = sender; this.recipients = recipients; this.latency = latency; this.constraints = constraints; } @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { //We check in the Portal constructor that all non-Object methods are //valid to call through, so check we aren't calling an Object method. if (method.getDeclaringClass().equals(Object.class)) throw new IllegalStreamGraphException("Call to Object method "+method+" through portal", sender); //We probably don't have access to the message interface, but we //need to call its methods anyway. This might fail under a security //manager, or if the interface is somehow security sensitive to the //Java platform(?). 
method.setAccessible(true); for (PrimitiveWorker<?, ?> recipient : recipients) { MessageConstraint constraint = constraints.get(recipient); assert constraint != null; long timeToDelivery; switch (constraint.getDirection()) { case DOWNSTREAM: //We add one to the reverseSdep result because we're //going downstream, thus e.g. if we're in our first //execution (sender.getExecutions() == 0), the message //should be delivered downstream at recipient's 0, but //we expect TTD to be greater than getExecutions(). //Classic StreamIt adjusts the TTD at delivery to //account for this; we'll do it here. //TODO: is the inner +1 correct? timeToDelivery = constraint.reverseSdep(sender.getExecutions()+constraint.getLatency()+1)+1; break; case UPSTREAM: //TODO: is the +1 correct? timeToDelivery = constraint.sdep(sender.getExecutions()+constraint.getLatency()+1); break; case EQUAL: case INCOMPARABLE: throw new IllegalStreamGraphException("Illegal messaging: "+constraint); default: throw new AssertionError(); } //Queue up the message at the recipient. Message message = new Message(method, args, timeToDelivery); recipient.sendMessage(message); } //Methods on the portal interface return void. return null; } } }
package org.mockito;

import org.mockito.listeners.InvocationListener;
import org.mockito.mock.SerializableMode;
import org.mockito.stubbing.Answer;

import java.io.Serializable;

/**
 * Allows mock creation with additional mock settings.
 * <p/>
 * Don't use it too often.
 * Consider writing simple tests that use simple mocks.
 * Repeat after me: simple tests push simple, KISSy, readable & maintainable code.
 * If you cannot write a test in a simple way - refactor the code under test.
 * <p/>
 * Examples of mock settings:
 * <pre class="code"><code class="java">
 *   //Creates mock with different default answer & name
 *   Foo mock = mock(Foo.class, withSettings()
 *       .defaultAnswer(RETURNS_SMART_NULLS)
 *       .name("cool mockie")
 *       );
 *
 *   //Creates mock with different default answer, descriptive name and extra interfaces
 *   Foo mock = mock(Foo.class, withSettings()
 *       .defaultAnswer(RETURNS_SMART_NULLS)
 *       .name("cool mockie")
 *       .extraInterfaces(Bar.class));
 * </code></pre>
 * {@link MockSettings} has been introduced for two reasons.
 * Firstly, to make it easy to add another mock setting when the demand comes.
 * Secondly, to enable combining together different mock settings without introducing zillions of overloaded mock() methods.
 */
public interface MockSettings extends Serializable {

    /**
     * Specifies extra interfaces the mock should implement, so that the
     * returned mock can be cast to any of them.
     * <p>
     * Example:
     * <pre class="code"><code class="java">
     *   Foo foo = mock(Foo.class, withSettings().extraInterfaces(Bar.class, Baz.class));
     *   Bar bar = (Bar) foo;
     * </code></pre>
     *
     * @param interfaces extra interfaces the mock should implement
     * @return settings instance so that you can fluently specify other settings
     */
    MockSettings extraInterfaces(Class<?>... interfaces);

    /**
     * Specifies mock name. Naming mocks can be helpful for debugging - the name is used in all verification errors.
     * <p>
     * Beware that naming mocks is not a solution for complex code which uses too many mocks or collaborators.
     * <b>If you have too many mocks then refactor the code</b> so that it's easy to test/debug without necessity of naming mocks.
     * <p>
     * <b>If you use &#064;Mock annotation then you've got naming mocks for free!</b> &#064;Mock uses field name as mock name. {@link Mock Read more.}
     * <p>
     * Examples:
     * <pre class="code"><code class="java">
     *   Foo foo = mock(Foo.class, withSettings().name("foo"));
     *
     *   //Below does exactly the same:
     *   Foo foo = mock(Foo.class, "foo");
     * </code></pre>
     * @param name the name of the mock, later used in all verification errors
     * @return settings instance so that you can fluently specify other settings
     */
    MockSettings name(String name);

    /**
     * Specifies the instance to spy on. Makes sense only for spies/partial mocks.
     *
     * Sets the instance that will be spied. Actually copies the internal fields of the passed instance to the mock.
     * <p>
     * As usual you are going to read <b>the partial mock warning</b>:
     * Object oriented programming is more or less about tackling complexity by dividing the complexity into separate, specific, SRPy objects.
     * How does partial mock fit into this paradigm? Well, it just doesn't...
     * Partial mock usually means that the complexity has been moved to a different method on the same object.
     * In most cases, this is not the way you want to design your application.
     * <p>
     * However, there are rare cases when partial mocks come handy:
     * dealing with code you cannot change easily (3rd party interfaces, interim refactoring of legacy code etc.)
     * However, I wouldn't use partial mocks for new, test-driven & well-designed code.
     * <p>
     * Enough warnings about partial mocks, see an example how spiedInstance() works:
     * <pre class="code"><code class="java">
     *   Foo foo = mock(Foo.class, withSettings().spiedInstance(fooInstance));
     *
     *   //Below does exactly the same:
     *   Foo foo = spy(fooInstance);
     * </code></pre>
     *
     * About stubbing for a partial mock, as it is a spy it will always call the real method, unless you use the
     * <code>doReturn</code>|<code>Throw</code>|<code>Answer</code>|<code>CallRealMethod</code> stubbing style. Example:
     *
     * <pre class="code"><code class="java">
     *   List list = new LinkedList();
     *   List spy = spy(list);
     *
     *   //Impossible: real method is called so spy.get(0) throws IndexOutOfBoundsException (the list is yet empty)
     *   when(spy.get(0)).thenReturn("foo");
     *
     *   //You have to use doReturn() for stubbing
     *   doReturn("foo").when(spy).get(0);
     * </code>
     *
     * @param instance to spy on
     * @return settings instance so that you can fluently specify other settings
     */
    MockSettings spiedInstance(Object instance);

    /**
     * Specifies default answers to interactions.
     * It's quite advanced feature and typically you don't need it to write decent tests.
     * However it can be helpful when working with legacy systems.
     * <p>
     * It is the default answer so it will be used <b>only when you don't</b> stub the method call.
     *
     * <pre class="code"><code class="java">
     *   Foo mock = mock(Foo.class, withSettings().defaultAnswer(RETURNS_SMART_NULLS));
     *   Foo mockTwo = mock(Foo.class, withSettings().defaultAnswer(new YourOwnAnswer()));
     *
     *   //Below does exactly the same:
     *   Foo mockTwo = mock(Foo.class, new YourOwnAnswer());
     * </code></pre>
     *
     * @param defaultAnswer default answer to be used by mock when not stubbed
     * @return settings instance so that you can fluently specify other settings
     */
    @SuppressWarnings("unchecked")
    MockSettings defaultAnswer(Answer defaultAnswer);

    /**
     * Configures the mock to be serializable. With this feature you can use a mock in a place that requires dependencies to be serializable.
     * <p>
     * WARNING: This should be rarely used in unit testing.
     * <p>
     * The behaviour was implemented for a specific use case of a BDD spec that had an unreliable external dependency.  This
     * was in a web environment and the objects from the external dependency were being serialized to pass between layers.
     * <p>
     * Example:
     * <pre class="code"><code class="java">
     *   List serializableMock = mock(List.class, withSettings().serializable());
     * </code></pre>
     *
     * @return settings instance so that you can fluently specify other settings
     * @since 1.8.1
     */
    MockSettings serializable();

    /**
     * Configures the mock to be serializable with a specific serializable mode.
     * With this feature you can use a mock in a place that requires dependencies to be serializable.
     * <p>
     * WARNING: This should be rarely used in unit testing.
     * <p>
     * The behaviour was implemented for a specific use case of a BDD spec that had an unreliable external dependency.  This
     * was in a web environment and the objects from the external dependency were being serialized to pass between layers.
     *
     * <pre class="code"><code class="java">
     *   List serializableMock = mock(List.class, withSettings().serializable(SerializableMode.ACROSS_CLASSLOADERS));
     * </code></pre>
     *
     * @param mode serialization mode
     * @return settings instance so that you can fluently specify other settings
     * @since 1.9.8
     */
    MockSettings serializable(SerializableMode mode);

    /**
     * Enables real-time logging of method invocations on this mock. Can be used
     * during test debugging in order to find wrong interactions with this mock.
     * <p>
     * Invocations are logged as they happen to the standard output stream.
     * <p>
     * Calling this method multiple times makes no difference.
     * <p>
     * Example:
     * <pre class="code"><code class="java">
     *   List mockWithLogger = mock(List.class, withSettings().verboseLogging());
     * </code></pre>
     *
     * @return settings instance so that you can fluently specify other settings
     */
    MockSettings verboseLogging();

    /**
     * Registers a listener for method invocations on this mock. The listener is
     * notified every time a method on this mock is called.
     * <p>
     * Multiple listeners may be added, but the same object is only added once.
     * The order, in which the listeners are added, is not guaranteed to be the
     * order in which the listeners are notified.
     *
     * Example:
     * <pre class="code"><code class="java">
     *   List mockWithListener = mock(List.class, withSettings().invocationListeners(new YourInvocationListener()));
     * </code></pre>
     *
     * See the {@link InvocationListener listener interface} for more details.
     *
     * @param listeners The invocation listeners to add. May not be null.
     * @return settings instance so that you can fluently specify other settings
     */
    MockSettings invocationListeners(InvocationListener... listeners);

    /**
     * A stub-only mock does not record method
     * invocations, thus saving memory but
     * disallowing verification of invocations.
     * <p>
     * Example:
     * <pre class="code"><code class="java">
     *   List stubOnly = mock(List.class, withSettings().stubOnly());
     * </code></pre>
     *
     * @return settings instance so that you can fluently specify other settings
     */
    MockSettings stubOnly();
}
package org.archive.wayback.archivalurl;

import java.util.HashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.archive.wayback.ResultURIConverter;
import org.archive.wayback.core.SearchResult;

/**
 * Library for updating arbitrary attributes in arbitrary tags to rewrite
 * HTML documents so URI references point back into the Wayback Machine.
 * Attempts to make minimal changes so nothing gets broken during this process.
 *
 * @author brad
 * @version $Date$, $Revision$
 */
public class TagMagix {

	/**
	 * Cache of compiled patterns, keyed by "tagName attrName".
	 * NOTE(review): access is unsynchronized (the old TODO asked whether a
	 * Hashtable was needed); if getPattern() can be reached from multiple
	 * threads, synchronize it or use a concurrent map -- confirm the
	 * threading model first.
	 */
	private static final HashMap<String, Pattern> pcPatterns =
			new HashMap<String, Pattern>();

	// A double-quoted attribute value, e.g. "foo".
	private static final String QUOTED_ATTR_VALUE = "(?:\"[^\">]*\")";
	// An apostrophe-quoted attribute value, e.g. 'foo'.
	private static final String APOSED_ATTR_VALUE = "(?:'[^'>]*')";
	// A bare (unquoted) attribute value: anything up to whitespace/quote/'>'.
	private static final String RAW_ATTR_VALUE = "(?:[^ \\t\\n\\x0B\\f\\r>\"']+)";

	private static final String ANY_ATTR_VALUE = QUOTED_ATTR_VALUE + "|"
			+ APOSED_ATTR_VALUE + "|" + RAW_ATTR_VALUE;

	/**
	 * get (and cache) a regex Pattern for locating an HTML attribute value
	 * within a particular tag. if found, the pattern will have the attribute
	 * value in group 1. Note that the attribute value may contain surrounding
	 * apostrophe(') or quote(") characters.
	 *
	 * @param tagName tag to search for (case-insensitive)
	 * @param attrName attribute within that tag whose value is wanted
	 * @return Pattern to match the tag-attribute's value
	 */
	private static Pattern getPattern(String tagName, String attrName) {
		String key = tagName + " " + attrName;
		// Generic map: no cast needed (was a raw HashMap + (Pattern) cast).
		Pattern pc = pcPatterns.get(key);
		if (pc == null) {
			String tagPatString = "<\\s*" + tagName + "\\s+[^>]*\\b" + attrName
					+ "\\s*=\\s*(" + ANY_ATTR_VALUE + ")(?:\\s|>)?";
			pc = Pattern.compile(tagPatString, Pattern.CASE_INSENSITIVE);
			pcPatterns.put(key, pc);
		}
		return pc;
	}

	/**
	 * Alter the HTML document in page, updating URLs in the attrName
	 * attributes of all tagName tags such that:
	 *
	 * 1) absolute URLs are prefixed with:
	 *        wmPrefix + pageTS
	 * 2) server-relative URLs are prefixed with:
	 *        wmPrefix + pageTS + (host of page)
	 * 3) path-relative URLs are prefixed with:
	 *        wmPrefix + pageTS + (attribute URL resolved against pageUrl)
	 *
	 * @param page document being rewritten in place
	 * @param uriConverter produces the replay URL for each found URL
	 * @param result search result the page belongs to
	 * @param baseUrl base URL for resolving relative references
	 * @param tagName tag to scan (case-insensitive)
	 * @param attrName attribute whose value is rewritten
	 */
	public static void markupTagREURIC(StringBuilder page,
			ResultURIConverter uriConverter, SearchResult result,
			String baseUrl, String tagName, String attrName) {

		Pattern tagPat = getPattern(tagName, attrName);
		Matcher matcher = tagPat.matcher(page);

		int idx = 0;
		// find(idx) resets the matcher, so each iteration re-reads the
		// (mutated) StringBuilder from idx onward.
		while (matcher.find(idx)) {
			String url = matcher.group(1);
			int origUrlLength = url.length();
			int attrStart = matcher.start(1);
			int attrEnd = matcher.end(1);
			// Preserve whichever quote style surrounded the original value.
			String quote = "";
			if (url.charAt(0) == '"') {
				quote = "\"";
				url = url.substring(1, url.length() - 1);
			} else if (url.charAt(0) == '\'') {
				quote = "'";
				url = url.substring(1, url.length() - 1);
			}
			String replayUrl = quote
					+ uriConverter.makeRedirectReplayURI(result, url, baseUrl)
					+ quote;
			// Account for the length change so idx lands just past the
			// replacement in the updated buffer.
			int delta = replayUrl.length() - origUrlLength;
			page.replace(attrStart, attrEnd, replayUrl);
			idx = attrEnd + delta;
		}
	}

	/**
	 * find and return the href value within a BASE tag inside the HTML
	 * document within the StringBuilder page. returns null if no BASE-HREF
	 * is found.
	 *
	 * @param page document to scan
	 * @return URL of base-href within page, or null if none is found.
	 */
	public static String getBaseHref(StringBuilder page) {
		String found = null;

		Pattern baseHrefPattern = TagMagix.getPattern("BASE", "HREF");
		Matcher matcher = baseHrefPattern.matcher(page);
		int idx = 0;
		if (matcher.find(idx)) {
			found = matcher.group(1);
			// Strip surrounding quote characters, if any.
			if (found.charAt(0) == '"') {
				found = found.substring(1, found.length() - 1);
			} else if (found.charAt(0) == '\'') {
				found = found.substring(1, found.length() - 1);
			}
		}
		return found;
	}
}
package org.jdesktop.swingx.plaf;

import java.util.Arrays;
import java.util.List;

import javax.swing.BorderFactory;
import javax.swing.LookAndFeel;
import javax.swing.UIManager;
import javax.swing.border.LineBorder;
import javax.swing.plaf.BorderUIResource;

import org.jdesktop.swingx.JXDatePicker;
import org.jdesktop.swingx.plaf.basic.BasicDatePickerUI;
import org.jdesktop.swingx.util.OS;

/**
 * Registers look-and-feel defaults (UI class, border, arrow icons) for
 * {@link JXDatePicker}.
 *
 * @author Joshua Outwater
 */
public class DatePickerAddon extends AbstractComponentAddon {

    public DatePickerAddon() {
        super("JXDatePicker");
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void addBasicDefaults(LookAndFeelAddons addon, List<Object> defaults) {
        super.addBasicDefaults(addon, defaults);

        defaults.addAll(Arrays.asList(new Object[]{
            JXDatePicker.uiClassID,
            BasicDatePickerUI.class.getName(),
            "JXDatePicker.border",
            new BorderUIResource(BorderFactory.createCompoundBorder(
                    LineBorder.createGrayLineBorder(),
                    BorderFactory.createEmptyBorder(3, 3, 3, 3)))
        }));

        UIManager.getDefaults().addResourceBundle(
                "org.jdesktop.swingx.plaf.basic.resources.DatePicker");
    }

    /**
     * {@inheritDoc}
     * <p>
     * BUGFIX: the "JXDatePicker.arrowIcon" key used to be added before the
     * {@code isWindowsXP()} check, so on non-XP Windows the key was left
     * without a value. The other add* methods all append key/value pairs, so
     * an unpaired key would misalign every subsequent entry. The key is now
     * only added when an icon value is added with it.
     */
    @Override
    protected void addWindowsDefaults(LookAndFeelAddons addon, List<Object> defaults) {
        super.addWindowsDefaults(addon, defaults);
        if (OS.isWindowsXP()) {
            defaults.add("JXDatePicker.arrowIcon");
            if (OS.isUsingWindowsVisualStyles()) {
                defaults.add(LookAndFeel.makeIcon(DatePickerAddon.class,
                        "windows/resources/combo-xp.png"));
            } else {
                defaults.add(LookAndFeel.makeIcon(DatePickerAddon.class,
                        "windows/resources/combo-w2k.png"));
            }
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void addLinuxDefaults(LookAndFeelAddons addon, List<Object> defaults) {
        super.addLinuxDefaults(addon, defaults);
        defaults.addAll(Arrays.asList(new Object[] {
            "JXDatePicker.arrowIcon",
            LookAndFeel.makeIcon(DatePickerAddon.class,
                    "linux/resources/combo-gtk.png")
        }));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void addMacDefaults(LookAndFeelAddons addon, List<Object> defaults) {
        super.addMacDefaults(addon, defaults);
        defaults.addAll(Arrays.asList(new Object[] {
            "JXDatePicker.arrowIcon",
            LookAndFeel.makeIcon(DatePickerAddon.class,
                    "macosx/resources/combo-osx.png")
        }));
    }
}
package jp.kobe_u.cspiral.norakore;

import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Calendar;
import java.util.List;
import java.util.Random;

import javax.imageio.ImageIO;

import jp.kobe_u.cspiral.norakore.model.*;
import jp.kobe_u.cspiral.norakore.util.DBUtils;

import org.bson.types.ObjectId;

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.DBObject;

import com.sun.jersey.core.util.Base64;

/**
 * Controller for "nyavatar" (cat avatar) resources: search, detail lookup,
 * registration, and base64 image storage/retrieval backed by MongoDB.
 */
public class NorakoreController {

	private final String NyavatarColl_Name = "nyavatar";
	private final String UserColl_Name = "user";
	private final String PictureColl_Name = "picture";
	private final String IconColl_Name = "icon";

	private DBCollection NyavatarColl;
	private DBCollection UserColl;
	private DBCollection PictureColl;
	private DBCollection IconColl;

	public NorakoreController() {
		this.NyavatarColl = DBUtils.getInstance().getDb().getCollection(NyavatarColl_Name);
		this.UserColl = DBUtils.getInstance().getDb().getCollection(UserColl_Name);
		this.PictureColl = DBUtils.getInstance().getDb().getCollection(PictureColl_Name);
		this.IconColl = DBUtils.getInstance().getDb().getCollection(IconColl_Name);
	}

	/**
	 * Lists nyavatars near the given coordinates.
	 * NOTE(review): the lon/lat parameters and search_area are currently
	 * unused -- every document in the collection is returned (the TODO below
	 * predates this review). Confirm intended geo-query behavior.
	 *
	 * @param lon longitude of the search center (currently ignored)
	 * @param lat latitude of the search center (currently ignored)
	 * @return list of all nyavatars
	 */
	public NyavatarList searchNyavatar(double lon, double lat) {
		final double search_area = 10000;
		NyavatarList result = new NyavatarList();
		List<Nyavatar> list = new ArrayList<Nyavatar>();
		DBCursor cursor = NyavatarColl.find();
		for (DBObject nya : cursor) {
			// TODO: mongo,
			list.add(new Nyavatar(nya));
		}
		result.setList(list);
		return result;
	}

	/**
	 * Returns the nyavatar list belonging to the given user.
	 *
	 * @param userID id of the user document (used directly as Mongo _id)
	 * @return the user's nyavatars
	 * @throws Exception if the user, the user's list, or any referenced
	 *         nyavatar document is missing, or the userID is malformed
	 */
	public NyavatarList getUsersNyavatar(String userID) throws Exception {
		NyavatarList result = new NyavatarList();
		try {
			// retrieve the specified user's DBObject
			//DBObject query = new BasicDBObject("_id", new ObjectId(userID));
			DBObject query = new BasicDBObject("_id", userID);
			DBObject userdbo = UserColl.findOne(query);
			if (userdbo == null)
				throw new Exception("Specified user is not found.");

			// get user's nyavatar list
			BasicDBList id_list = (BasicDBList) userdbo.get("nyavatarList");
			if (id_list == null)
				throw new Exception("user's user is not found.");

			// generate nyavatar list from id list
			List<Nyavatar> ny_list = new ArrayList<Nyavatar>();
			for (Object id : id_list) {
				ObjectId oid = new ObjectId((String) id);
				DBObject ny_dbo = NyavatarColl.findOne(new BasicDBObject("_id", oid));
				if (ny_dbo == null)
					throw new Exception("There is lost-nyavatar on db.");
				ny_list.add(new Nyavatar(ny_dbo));
			}

			// generate result object
			result.setList(ny_list);
		} catch (IllegalArgumentException e) {
			// thrown by new ObjectId() on a malformed hex id
			throw new Exception(MessageFormat.format("Invalid userID, userID={0}", userID));
		}
		return result;
	}

	/**
	 * Loads the full detail record of one nyavatar.
	 *
	 * @param nyavatarID hex ObjectId of the nyavatar document
	 * @param userID requesting user (currently unused -- see TODOs)
	 * @return populated detail object
	 */
	public NyavatarDetail getNyavatarDetail(String nyavatarID, String userID) {
		NyavatarDetail result = new NyavatarDetail();
		DBObject query = new BasicDBObject("_id", new ObjectId(nyavatarID));
		DBObject queryResult = NyavatarColl.findOne(query);
		result.setNyavatarID(queryResult.get("_id").toString());
		result.setName((String) queryResult.get("name"));
		result.setType((String) queryResult.get("type"));
		result.setPictureID((String) queryResult.get("pictureID"));
		result.setIconID((String) queryResult.get("iconID"));
		result.setDate((Date) queryResult.get("date"));
		result.setLocation(new Location((DBObject) queryResult.get("location")));
		// TODO: LostCatIDAPILostCatsID
		// TODO: say
		result.setSay((String) queryResult.get("say"));
		return result;
	}

	/**
	 * Registers a new nyavatar for a user, stores its picture, links it into
	 * the user's list, and awards 10 bonitos.
	 *
	 * @param userID owning user id
	 * @param name display name
	 * @param type nyavatar type
	 * @param picture base64 data-URI image payload (required)
	 * @param lon longitude of the sighting
	 * @param lat latitude of the sighting
	 * @return the new nyavatar's id, or null if the user is missing
	 * @throws Exception if picture is absent or cannot be saved
	 */
	public String registerNyavatar(String userID, String name, String type,
			String picture, double lon, double lat) throws Exception {
		NyavatarDetail nya = new NyavatarDetail();
		nya.setName(name);
		nya.setType(type);
		if (picture == null || picture.length() == 0)
			throw new Exception("Param:picture is not specified.");
		String picid = saveImage(picture, "picture");
		// BUGFIX: was `picid == ""`, a reference comparison that is only true
		// for the interned "" literal -- use isEmpty() to compare content.
		if (picid.isEmpty())
			throw new Exception("saveImage failed.");
		nya.setPictureID(picid);

		// iconID: pick one of two stock icons at random.
		String iconid = "nullID";
		Random rnd = new Random();
		int ran = rnd.nextInt(2);
		switch (ran) {
			case 0:
				iconid = "563374c731b1b0e407093a9f";
				// BUGFIX: break was missing, so case 0 fell through and
				// case 1's icon was always chosen.
				break;
			case 1:
				iconid = "563374d831b1b0e408093a9f";
				break;
		}
		nya.setIconID(iconid);

		Location loc = new Location();
		loc.setLon(lon);
		loc.setLat(lat);
		nya.setLocation(loc);
		nya.setSay("");
		nya.determineParams(userID);

		// TODO: picture
		DBObject dbo = nya.toDBObject();
		NyavatarColl.insert(dbo);
		String nya_id = dbo.get("_id").toString();

		// TODO: error handling
		//DBObject query = new BasicDBObject("_id", new ObjectId(userID));
		DBObject query = new BasicDBObject("_id", userID);
		DBObject userdbo = UserColl.findOne(query);
		if (userdbo == null)
			return null;
		BasicDBList list = (BasicDBList) userdbo.get("nyavatarList");
		list.add(nya_id);
		userdbo.put("nyavatarList", list);
		// NOTE(review): NPE if the user document lacks a numeric "bonitos"
		// field -- presumably guaranteed at user creation; confirm.
		Double bonitos = (Double) userdbo.get("bonitos") + 10;
		userdbo.put("bonitos", bonitos);
		UserColl.update(query, userdbo);
		return nya_id;
	}

	/**
	 * Stores a base64 image string in the picture or icon collection.
	 *
	 * @param data raw (data-URI) image string
	 * @param res "picture" or "icon"
	 * @return new document id, or "" if res is unrecognized
	 */
	public String saveImage(String data, String res) {
		DBObject dbo = new BasicDBObject("src", data);
		if (res.equals("picture")) {
			PictureColl.save(dbo);
		} else if (res.equals("icon")) {
			IconColl.save(dbo);
		} else {
			return "";
		}
		String id = dbo.get("_id").toString();
		return id;
	}

	/**
	 * Decodes a stored image and re-encodes it as jpg (pictures) or
	 * png (icons).
	 *
	 * @param id document id in the picture/icon collection
	 * @param res "picture" or "icon"
	 * @return encoded image bytes, or null on any failure
	 */
	public ByteArrayOutputStream getImage(String id, String res) {
		DBObject query = new BasicDBObject("_id", new ObjectId(id));
		String type;
		DBObject o;
		if (res.equals("picture")) {
			o = PictureColl.findOne(query);
			type = "jpg";
		} else if (res.equals("icon")) {
			o = IconColl.findOne(query);
			type = "png";
		} else {
			return null;
		}
		if (o == null)
			return null;
		String src = (String) o.get("src");
		// src is expected to look like "data:<mime>;base64,<payload>".
		// ROBUSTNESS: guard against a missing payload instead of letting
		// split(",")[1] throw ArrayIndexOutOfBoundsException.
		String[] parts = src.split(",");
		if (parts.length < 2)
			return null;
		byte[] bytes = Base64.decode(parts[1]);
		try {
			BufferedImage bImage = ImageIO.read(new ByteArrayInputStream(bytes));
			ByteArrayOutputStream baos = new ByteArrayOutputStream();
			ImageIO.write(bImage, type, baos);
			return baos;
		} catch (IOException e) {
			// TODO catch
			e.printStackTrace();
		}
		return null;
	}
}
package datatypes.base;

import json.JSONToStringI;

/**
 * A four-part version number: major.minor.build-patch.
 * Supports parsing from "major[.minor[.build[.patch]]][-patch]"; a "-patch"
 * suffix is clamped to 9999, and a fourth dotted component (if present)
 * overrides the suffix value.  Natural ordering compares major, minor,
 * build, then patch.
 */
public class Version implements Comparable, JSONToStringI {

    protected int major;
    protected int minor;
    protected int build;
    protected int patch;

    /** Creates the "unset" version, rendered as "0.0.0-0000" by toString(). */
    public Version() {
        this.major = -1;
    }

    public Version(int major, int minor, int build, int patch) {
        this.major = major;
        this.minor = minor;
        this.build = build;
        this.patch = patch;
    }

    /**
     * Parses a version string.
     *
     * @param version e.g. "1.2.3", "1.2.3-42", or "1.2.3.4"
     * @throws IllegalArgumentException if the string is null/empty, has more
     *         than one "-" section, or any component is not a number
     */
    public Version(String version) throws IllegalArgumentException {
        String str = version;
        if (str == null || str.length() == 0) {
            throw new IllegalArgumentException("version-string is empty or not set");
        }

        String sa[] = str.split("-");
        str = sa[0];
        try {
            if (sa.length == 2) {
                patch = Integer.parseInt(sa[1]);
                // Patch suffix is limited to four digits.
                if (patch > 9999) {
                    patch = 9999;
                }
            } else if (sa.length > 2) {
                throw new IllegalArgumentException("invalid Version-string");
            }

            sa = str.split("\\.");
            major = Integer.parseInt(sa[0]);
            if (sa.length > 1) {
                minor = Integer.parseInt(sa[1]);
            }
            if (sa.length > 2) {
                build = Integer.parseInt(sa[2]);
            }
            if (sa.length > 3) {
                // parseInt for consistency (was Integer.valueOf, which boxes
                // needlessly). A fourth dotted part overrides any "-" suffix.
                patch = Integer.parseInt(sa[3]);
            }
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException("converting failed", e);
        }
    }

    /** Compares only the major components; returns -1, 0, or 1. */
    public int compareMajor(Version v) {
        return Integer.compare(major, v.major);
    }

    /** Compares only the minor components; returns -1, 0, or 1. */
    public int compareMinor(Version v) {
        return Integer.compare(minor, v.minor);
    }

    /**
     * Orders by major, then minor, then build, then patch.
     *
     * @throws ClassCastException if o is not a Version
     */
    @Override
    public int compareTo(Object o) {
        Version v = (Version) o;
        int c = Integer.compare(major, v.major);
        if (c != 0) {
            return c;
        }
        c = Integer.compare(minor, v.minor);
        if (c != 0) {
            return c;
        }
        c = Integer.compare(build, v.build);
        if (c != 0) {
            return c;
        }
        return Integer.compare(patch, v.patch);
    }

    /**
     * Equality on all four components; added so equals is consistent with
     * compareTo (previously only compareTo existed).
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof Version)) {
            return false;
        }
        Version v = (Version) o;
        return major == v.major && minor == v.minor
                && build == v.build && patch == v.patch;
    }

    @Override
    public int hashCode() {
        int h = major;
        h = 31 * h + minor;
        h = 31 * h + build;
        h = 31 * h + patch;
        return h;
    }

    /** Renders "major.minor.build-PPPP" with the patch zero-padded to 4 digits. */
    @Override
    public String toString() {
        if (major == -1) {
            return "0.0.0-0000";
        }

        StringBuilder b = new StringBuilder();
        b.append(String.valueOf(major));
        b.append('.');
        b.append(String.valueOf(minor));
        b.append('.');
        b.append(String.valueOf(build));
        b.append('-');
        if (patch < 10) {
            b.append("000");
        } else if (patch < 100) {
            b.append("00");
        } else if (patch < 1000) {
            b.append("0");
        }
        b.append(String.valueOf(patch));
        return b.toString();
    }

    /**
     * Renders the four components as a quoted dotted string.
     * The formated/indent parameters are unused here because the output is a
     * single JSON token.
     */
    @Override
    public String toJsonString(boolean formated, int indent) {
        if (major == -1) {
            return "\"0.0.0.0\"";
        }

        StringBuilder b = new StringBuilder();
        b.append('"');
        b.append(String.valueOf(major));
        b.append('.');
        b.append(String.valueOf(minor));
        b.append('.');
        b.append(String.valueOf(build));
        b.append('.');
        b.append(String.valueOf(patch));
        b.append('"');
        return b.toString();
    }
}
package magpie.models.regression;

import java.util.ArrayList;
import java.util.List;
import magpie.data.Dataset;
import org.apache.commons.math3.stat.regression.MillerUpdatingRegression;
import org.apache.commons.math3.stat.regression.UpdatingMultipleLinearRegression;

/**
 * Performs linear regression using all attributes in a Dataset raised to integer
 * exponents. This method creates models that are nonlinear polynomials, like this:
 * <center><code>f(x,y,z) = a + b * x + c * x<sup>2</sup> + d * y + e * y<sup>2</sup> + ...</code></center>
 *
 * <usage><p><b>Usage</b>: &lt;order>
 * <br><pr><i>order</i>: Maximum order of terms in polynomial</usage>
 *
 * @author Logan Ward
 * @version 0.1
 */
public class PolynomialRegression extends BaseRegression {
    /** Desired order of polynomial */
    int order = 1;
    /** Number of attributes used in model */
    int numAttributes = 0;
    /** Coefficients of each term in the polynomial */
    double[] coefficients = null;
    /** Names of attributes */
    String[] attributeNames;

    @Override
    public void setOptions(List Options) throws Exception {
        try {
            order = Integer.parseInt(Options.get(0).toString());
        } catch (Exception e) {
            throw new Exception(printUsage());
        }
    }

    @Override
    @SuppressWarnings("CloneDeclaresCloneNotSupported")
    public BaseRegression clone() {
        PolynomialRegression x = (PolynomialRegression) super.clone();
        // Deep-copy fitted state so clones train/run independently.
        if (coefficients != null) {
            x.coefficients = coefficients.clone();
            x.attributeNames = attributeNames.clone();
        }
        return x;
    }

    @Override
    public String printUsage() {
        return "Usage: <order of polynomial>";
    }

    @Override
    protected void train_protected(Dataset TrainData) {
        if (TrainData.NAttributes() > 10) {
            System.err.println("WARNING: PolynomialRegression was not intended to be used data with many attributes."
                    + " Your program could be using a lot of memory right now. Consider using an attribute selector.");
        }

        // Extract necessary data
        double[][] attributes = TrainData.getAttributeArray();
        double[] classVariable = TrainData.getMeasuredClassArray();
        numAttributes = TrainData.NAttributes();
        attributeNames = TrainData.getAttributeNames();

        // Fit a polynomial model
        coefficients = fitPolynomialModel(attributes, order, classVariable);
    }

    @Override
    public void run_protected(Dataset TrainData) {
        // BUGFIX: the message used to say "more attributes" even though the
        // check is for any mismatch (and had a "that"/"than" typo).
        if (TrainData.NAttributes() != numAttributes)
            throw new Error("Dataset has a different number of attributes than what was used during training");

        // Run the model
        double[][] attributes = TrainData.getAttributeArray();
        double[] result = runPolynomialModel(attributes, order, coefficients);

        // Store results
        TrainData.setPredictedClasses(result);
    }

    @Override
    public int getNFittingParameters() {
        // One intercept plus `order` terms per attribute.
        return 1 + numAttributes * order;
    }

    /**
     * Fit a polynomial model based on a matrix of attribute values. Returns the
     * coefficients of this model in the following order:<p>
     * Intercept, Coefficient of attribute1, Coefficient of attribute1<sup>2</sup>, ...,
     * Coefficient of attribute2, ...
     * @param attributes Matrix containing attributes for each entry (entries are rows, attributes columns)
     * @param order Desired order of polynomial
     * @param classVariable Class variable for each entry
     * @return Coefficients for model
     */
    static public double[] fitPolynomialModel(double[][] attributes, int order, double[] classVariable) {
        double[][] expandedAttributes = expandAttributes(attributes, order);

        // Since we are using a UpdatingMultipleLinearRegression, it is not really
        // necessary to calculated all of the attributes at one time. But,
        // it is easier to do so and this method is not important enough to
        // spend time making it marginally more efficient.
        UpdatingMultipleLinearRegression Fit =
                new MillerUpdatingRegression(expandedAttributes[0].length, false);
        Fit.addObservations(expandedAttributes, classVariable);

        // Get the results of the fit
        double[] parameters = Fit.regress().getParameterEstimates();

        // Set anything that is NaN (if it was not used in fit) to 0.0.
        // BUGFIX: was `parameters[i] == Double.NaN`, which is ALWAYS false
        // (NaN never compares equal to anything, including itself), so the
        // zeroing never happened. Double.isNaN() is the correct test.
        for (int i = 0; i < parameters.length; i++)
            if (Double.isNaN(parameters[i]))
                parameters[i] = 0.0;

        return parameters;
    }

    /**
     * Expand attribute array in order to allow it to be used to fit a polynomial
     * model. First column is all ones (for the intercept), other columns are for
     * terms listed in the same order as {@linkplain #fitPolynomialModel(double[][], int, double[]) }.
     * @param attributes Matrix containing attributes for each entry (entries are rows, attributes columns)
     * @param order Desired order of polynomial
     * @return Matrix that can be used to fit this model
     */
    static public double[][] expandAttributes(double[][] attributes, int order) {
        double[][] output = new double[attributes.length][attributes[0].length * order + 1];
        for (int e = 0; e < attributes.length; e++) {
            int count = 0;
            output[e][count++] = 1; // intercept column
            for (int a = 0; a < attributes[0].length; a++) {
                double attrValue = attributes[e][a];
                // Attribute to the first power
                output[e][count++] = attrValue;
                for (int o = 1; o < order; o++) {
                    // Attribute to the subsequent powers (built incrementally
                    // from the previous column to avoid Math.pow calls)
                    output[e][count] = output[e][count - 1] * attrValue;
                    count++;
                }
            }
        }
        return output;
    }

    /**
     * Run a polynomial model based on a matrix of attribute values. Coefficients
     * coefficients of this model in the following order:<p>
     * Intercept, Coefficient of attribute1, Coefficient of attribute1<sup>2</sup>, ...,
     * Coefficient of attribute2, ...
     * @param attributes Matrix containing attributes for each entry (entries are rows, attributes columns)
     * @param order Desired order of polynomial
     * @param coefficients Coefficients of polynomial model
     * @return Value of model for each entry
     */
    static public double[] runPolynomialModel(double[][] attributes, int order, double[] coefficients) {
        double[][] expandedAttributes = expandAttributes(attributes, order);
        double[] output = new double[attributes.length];

        // Evaluate model
        for (int e = 0; e < output.length; e++) {
            output[e] = coefficients[0];
            for (int i = 1; i < coefficients.length; i++)
                output[e] += expandedAttributes[e][i] * coefficients[i];
        }
        return output;
    }

    @Override
    protected String printModel_protected() {
        String output = String.format("%.3e", coefficients[0]);
        int count = 1;
        for (int a = 0; a < numAttributes; a++) {
            for (int o = 1; o <= order; o++) {
                output += String.format(" + %.3e * %s ^ %d", coefficients[count++],
                        attributeNames[a], o);
                // Wrap the printed expression every four terms.
                if (count % 4 == 0) output += "\n\t";
            }
        }
        return output + "\n";
    }
}
package com.faveset.khttp.ssl; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; import java.math.BigInteger; import java.security.cert.Certificate; import java.security.cert.CertificateFactory; import java.security.GeneralSecurityException; import java.security.NoSuchAlgorithmException; import java.security.KeyPair; import java.security.KeyPairGenerator; import java.security.PrivateKey; import java.security.Signature; import java.util.Date; import org.spongycastle.asn1.ASN1ObjectIdentifier; import org.spongycastle.asn1.x509.AlgorithmIdentifier; import org.spongycastle.asn1.x509.SubjectPublicKeyInfo; import org.spongycastle.asn1.x500.X500Name; import org.spongycastle.cert.X509CertificateHolder; import org.spongycastle.cert.X509v3CertificateBuilder; import org.spongycastle.operator.ContentSigner; public class CertificateBuilder { public static class DistinguishedName { private static final String sDetailDefault = "Unknown"; private static String[] sLabels = { "C", "ST", "L", "O", "OU", "CN", "emailAddress", }; private String mCommonName = sDetailDefault; private String mCountry = sDetailDefault; private String mEmail = sDetailDefault; private String mLocality = sDetailDefault; private String mOrgName = sDetailDefault; private String mOrgUnit = sDetailDefault; private String mState = sDetailDefault; // This must correspond to the order in sLabels. private String[] mComponents = new String[]{ mCountry, mState, mLocality, mOrgName, mOrgUnit, mCommonName, mEmail, }; public DistinguishedName() { } /** * Set to null to omit on output. */ public DistinguishedName setCommonName(String name) { mCommonName = name; return this; } /** * Set to null to omit on output. */ public DistinguishedName setCountry(String name) { mCountry = name; return this; } /** * Set to null to omit on output. 
*/ public DistinguishedName setEmail(String email) { mEmail = email; return this; } /** * Set to null to omit on output. */ public DistinguishedName setLocality(String locality) { mLocality = locality; return this; } /** * Set to null to omit on output. */ public DistinguishedName setOrgName(String orgName) { mOrgName = orgName; return this; } /** * Set to null to omit on output. */ public DistinguishedName setOrgUnit(String orgUnit) { mOrgUnit = orgUnit; return this; } /** * Set to null to omit on output. */ public DistinguishedName setState(String state) { mState = state; return this; } public String toString() { StringBuilder builder = new StringBuilder(); boolean first = true; for (int ii = 0; ii < mComponents.length; ii++) { String elem = mComponents[ii]; if (elem == null) { continue; } if (first) { first = false; } else { builder.append(", "); } String label = sLabels[ii]; builder.append(label + "=" + elem); } return builder.toString(); } } private static class Signer implements ContentSigner { private static final String sSigAlgorithm = "SHA256withRSA"; // OID for sha256WithRSAEncryption private static final String sSigAlgorithmOid = "1.2.840.113549.1.1.11"; private static final AlgorithmIdentifier sSigAlgorithmIdentifier = new AlgorithmIdentifier(new ASN1ObjectIdentifier(sSigAlgorithmOid)); private ByteArrayOutputStream mOutputStream; private Signature mSignature; public Signer(PrivateKey privKey) throws IllegalArgumentException { mOutputStream = new ByteArrayOutputStream(); try { mSignature = Signature.getInstance(sSigAlgorithm); mSignature.initSign(privKey); } catch (GeneralSecurityException e) { throw new IllegalArgumentException(e.getMessage()); } } @Override public AlgorithmIdentifier getAlgorithmIdentifier() { return sSigAlgorithmIdentifier; } @Override public OutputStream getOutputStream() { return mOutputStream; } @Override public byte[] getSignature() { try { mSignature.update(mOutputStream.toByteArray()); return mSignature.sign(); } catch 
(GeneralSecurityException e) { throw new RuntimeException(e); } } } private static final String sKeyAlgorithm = "RSA"; // 2048 bit keys. private static final int sKeySizeDefault = 2048; // 1 year in millis. By default, mNotAfter will be sent to now + sDefaultExpireMillis. private static long sDefaultExpireMillis = 365 * 24 * 60 * 60 * 1000L; private int mKeySize; private DistinguishedName mIssuer; private DistinguishedName mSubject; private BigInteger mSerial = BigInteger.ZERO; private Date mNotBefore; private Date mNotAfter; public CertificateBuilder() { long now = System.currentTimeMillis(); mKeySize = sKeySizeDefault; mIssuer = new DistinguishedName(); mSubject = new DistinguishedName(); mNotBefore = new Date(now); mNotAfter = new Date(now + sDefaultExpireMillis); } public Certificate build() throws IOException, IllegalArgumentException { // Generate a public key pair for the issuer. KeyPairGenerator gen; try { gen = KeyPairGenerator.getInstance(sKeyAlgorithm); } catch (NoSuchAlgorithmException e) { // RSA algorithm always exists. throw new RuntimeException(e); } gen.initialize(mKeySize); KeyPair keyPair = gen.generateKeyPair(); X500Name issuer = new X500Name(mIssuer.toString()); X500Name subject = new X500Name(mSubject.toString()); SubjectPublicKeyInfo pubKeyInfo = SubjectPublicKeyInfo.getInstance(keyPair.getPublic().getEncoded()); X509v3CertificateBuilder builder = new X509v3CertificateBuilder(issuer, mSerial, mNotBefore, mNotAfter, subject, pubKeyInfo); byte[] certBytes = builder.build(new Signer(keyPair.getPrivate())).getEncoded(); try { CertificateFactory certFactory = CertificateFactory.getInstance("X.509"); return certFactory.generateCertificate(new ByteArrayInputStream(certBytes)); } catch (GeneralSecurityException e) { // X.509 support always exists. Moreover, the builder will always generate a valid // X.509 certificate. 
throw new RuntimeException(e); } } public CertificateBuilder.DistinguishedName getIssuer() { return mIssuer; } public CertificateBuilder.DistinguishedName getSubject() { return mSubject; } public CertificateBuilder setNotAfter(Date notAfter) { mNotAfter = notAfter; return this; } public CertificateBuilder setNotBefore(Date notBefore) { mNotBefore = notBefore; return this; } /** * RSA key size in bits. * * @param numBits must be 1024 or 2048. */ public CertificateBuilder setKeySize(int numBits) { mKeySize = numBits; return this; } public CertificateBuilder setSerial(BigInteger serial) { mSerial = serial; return this; } }
package hivemall.regression; import static hivemall.HivemallConstants.BIGINT_TYPE_NAME; import static hivemall.HivemallConstants.INT_TYPE_NAME; import static hivemall.HivemallConstants.STRING_TYPE_NAME; import hivemall.LearnerBaseUDTF; import hivemall.io.FeatureValue; import hivemall.io.IWeightValue; import hivemall.io.PredictionModel; import hivemall.io.PredictionResult; import hivemall.io.WeightValue; import hivemall.io.WeightValue.WeightValueWithCovar; import hivemall.utils.collections.IMapIterator; import hivemall.utils.hadoop.HiveUtils; import java.util.ArrayList; import java.util.Collection; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.io.FloatWritable; public abstract class OnlineRegressionUDTF extends LearnerBaseUDTF { private static final Log logger = LogFactory.getLog(OnlineRegressionUDTF.class); protected ListObjectInspector featureListOI; protected PrimitiveObjectInspector featureInputOI; protected FloatObjectInspector targetOI; protected boolean parseFeature; protected PredictionModel model; protected int count; @Override public StructObjectInspector 
initialize(ObjectInspector[] argOIs) throws UDFArgumentException { if(argOIs.length < 2) { throw new UDFArgumentException(getClass().getSimpleName() + " takes 2 arguments: List<Int|BigInt|Text> features, float target [, constant string options]"); } this.featureInputOI = processFeaturesOI(argOIs[0]); this.targetOI = (FloatObjectInspector) argOIs[1]; processOptions(argOIs); PrimitiveObjectInspector featureOutputOI = dense_model ? PrimitiveObjectInspectorFactory.javaIntObjectInspector : featureInputOI; this.model = createModel(); if(preloadedModelFile != null) { loadPredictionModel(model, preloadedModelFile, featureOutputOI); } this.count = 0; return getReturnOI(featureOutputOI); } protected PrimitiveObjectInspector processFeaturesOI(ObjectInspector arg) throws UDFArgumentException { this.featureListOI = (ListObjectInspector) arg; ObjectInspector featureRawOI = featureListOI.getListElementObjectInspector(); String keyTypeName = featureRawOI.getTypeName(); if(!STRING_TYPE_NAME.equals(keyTypeName) && !INT_TYPE_NAME.equals(keyTypeName) && !BIGINT_TYPE_NAME.equals(keyTypeName)) { throw new UDFArgumentTypeException(0, "1st argument must be List of key type [Int|BitInt|Text]: " + keyTypeName); } this.parseFeature = STRING_TYPE_NAME.equals(keyTypeName); return HiveUtils.asPrimitiveObjectInspector(featureRawOI); } protected StructObjectInspector getReturnOI(ObjectInspector featureOutputOI) { ArrayList<String> fieldNames = new ArrayList<String>(); ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>(); fieldNames.add("feature"); ObjectInspector featureOI = ObjectInspectorUtils.getStandardObjectInspector(featureOutputOI); fieldOIs.add(featureOI); fieldNames.add("weight"); fieldOIs.add(PrimitiveObjectInspectorFactory.writableFloatObjectInspector); if(useCovariance()) { fieldNames.add("covar"); fieldOIs.add(PrimitiveObjectInspectorFactory.writableFloatObjectInspector); } return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs); } 
@Override public void process(Object[] args) throws HiveException { List<?> features = (List<?>) featureListOI.getList(args[0]); if(features.isEmpty()) { return; } float target = targetOI.get(args[1]); checkTargetValue(target); count++; train(features, target); } protected void checkTargetValue(float target) throws UDFArgumentException {} protected void train(final Collection<?> features, final float target) { float p = predict(features); update(features, target, p); } protected float predict(final Collection<?> features) { final ObjectInspector featureInspector = this.featureInputOI; final boolean parseFeature = this.parseFeature; float score = 0.f; for(Object f : features) {// a += w[i] * x[i] if(f == null) { continue; } final Object k; final float v; if(parseFeature) { FeatureValue fv = FeatureValue.parse(f); k = fv.getFeature(); v = fv.getValue(); } else { k = ObjectInspectorUtils.copyToStandardObject(f, featureInspector); v = 1.f; } float old_w = model.getWeight(k); if(old_w != 0f) { score += (old_w * v); } } return score; } protected PredictionResult calcScoreAndNorm(Collection<?> features) { final ObjectInspector featureInspector = this.featureInputOI; final boolean parseX = this.parseFeature; float score = 0.f; float squared_norm = 0.f; for(Object f : features) {// a += w[i] * x[i] if(f == null) { continue; } final Object k; final float v; if(parseX) { FeatureValue fv = FeatureValue.parse(f); k = fv.getFeature(); v = fv.getValue(); } else { k = ObjectInspectorUtils.copyToStandardObject(f, featureInspector); v = 1.f; } float old_w = model.getWeight(k); if(old_w != 0f) { score += (old_w * v); } squared_norm += (v * v); } return new PredictionResult(score).squaredNorm(squared_norm); } protected PredictionResult calcScoreAndVariance(Collection<?> features) { final ObjectInspector featureInspector = featureListOI.getListElementObjectInspector(); final boolean parseFeature = this.parseFeature; float score = 0.f; float variance = 0.f; for(Object f : features) {// 
a += w[i] * x[i] if(f == null) { continue; } final Object k; final float v; if(parseFeature) { FeatureValue fv = FeatureValue.parse(f); k = fv.getFeature(); v = fv.getValue(); } else { k = ObjectInspectorUtils.copyToStandardObject(f, featureInspector); v = 1.f; } IWeightValue old_w = model.get(k); if(old_w == null) { variance += (1.f * v * v); } else { score += (old_w.get() * v); variance += (old_w.getCovariance() * v * v); } } return new PredictionResult(score).variance(variance); } protected void update(Collection<?> features, float target, float predicted) { float d = computeUpdate(target, predicted); update(features, d); } protected float computeUpdate(float target, float predicted) { throw new IllegalStateException(); } protected void update(Collection<?> features, float coeff) { final ObjectInspector featureInspector = this.featureInputOI; for(Object f : features) {// w[i] += y * x[i] if(f == null) { continue; } final Object x; final float xi; if(parseFeature) { FeatureValue fv = FeatureValue.parse(f); x = fv.getFeature(); xi = fv.getValue(); } else { x = ObjectInspectorUtils.copyToStandardObject(f, featureInspector); xi = 1.f; } float old_w = model.getWeight(x); float new_w = old_w + (coeff * xi); model.set(x, new WeightValue(new_w)); } } @Override public final void close() throws HiveException { super.close(); if(model != null) { int numForwarded = 0; if(useCovariance()) { final WeightValueWithCovar probe = new WeightValueWithCovar(); final Object[] forwardMapObj = new Object[3]; final FloatWritable fv = new FloatWritable(); final FloatWritable cov = new FloatWritable(); final IMapIterator<Object, IWeightValue> itor = model.entries(); while(itor.next() != -1) { itor.getValue(probe); if(!probe.isTouched()) { continue; // skip outputting untouched weights } Object k = itor.getKey(); fv.set(probe.get()); cov.set(probe.getCovariance()); forwardMapObj[0] = k; forwardMapObj[1] = fv; forwardMapObj[2] = cov; forward(forwardMapObj); numForwarded++; } } else { final 
WeightValue probe = new WeightValue(); final Object[] forwardMapObj = new Object[2]; final FloatWritable fv = new FloatWritable(); final IMapIterator<Object, IWeightValue> itor = model.entries(); while(itor.next() != -1) { itor.getValue(probe); if(!probe.isTouched()) { continue; // skip outputting untouched weights } Object k = itor.getKey(); fv.set(probe.get()); forwardMapObj[0] = k; forwardMapObj[1] = fv; forward(forwardMapObj); numForwarded++; } } int numMixed = model.getNumMixed(); this.model = null; logger.info("Trained a prediction model using " + count + " training examples" + (numMixed > 0 ? "( numMixed: " + numMixed + " )" : "")); logger.info("Forwarded the prediction model of " + numForwarded + " rows"); } } }
package at.co.federmann.gtd.domain; import org.apache.commons.lang3.StringUtils; public enum Progress { // activity has been created and has not been processed since then OPEN(0, "progress.open"), // activity has been tackled IN_PROGRESS(1, "progress.in_progress"), // after having worked on the activity it was postponed to be completed at a later time POSTPONED(2, "progress.postponed"), // the execution of this task is delayed until some dependency has been taken care of WAITING(3, "progress.waiting"), // to handle this activity the activiy name and / or description need to be clarified CLARIFY(4, "progress.clarify"), // this activity will not be handled with for some reason REJECTED(5, "progress.rejected"), // this activity has become obsolete before it was delt with CLOSED(6, "progress.closed"), // this activity was delt with in a complete way DONE(7, "progress.done"), // this activity had to be reopened after it was delt with already REOPENED(8, "progress.reopened"); private final Integer id; private final String i18nKey; private Progress(Integer id, String i18nKey) { this.id = id; this.i18nKey = i18nKey; } public Integer getId() { return id; } public String getI18nKey() { return i18nKey; } public static Progress resolveProgress(Integer id) { if (id == null) { return null; } for (Progress progress : Progress.values()) { if (id.equals(progress.getId())) { return progress; } } throw new IllegalArgumentException(StringUtils.join("No matching Progress found for ID ", id)); } }